target-ppc/translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
32
33 #define CPU_SINGLE_STEP 0x1
34 #define CPU_BRANCH_STEP 0x2
35 #define GDBSTUB_SINGLE_STEP 0x4
36
37 /* Include definitions for instruction classes and implementation flags */
38 //#define DO_SINGLE_STEP
39 //#define PPC_DEBUG_DISAS
40 //#define DEBUG_MEMORY_ACCESSES
41 //#define DO_PPC_STATISTICS
42 //#define OPTIMIZE_FPRF_UPDATE
43
44 /*****************************************************************************/
45 /* Code translation helpers */
46
47 /* global register indexes */
48 static TCGv cpu_env;
49 static char cpu_reg_names[10*3 + 22*4 /* GPR */
50 #if !defined(TARGET_PPC64)
51 + 10*4 + 22*5 /* SPE GPRh */
52 #endif
53 + 10*4 + 22*5 /* FPR */
54 + 2*(10*6 + 22*7) /* AVRh, AVRl */
55 + 8*5 /* CRF */];
56 static TCGv cpu_gpr[32];
57 #if !defined(TARGET_PPC64)
58 static TCGv cpu_gprh[32];
59 #endif
60 static TCGv cpu_fpr[32];
61 static TCGv cpu_avrh[32], cpu_avrl[32];
62 static TCGv cpu_crf[8];
63 static TCGv cpu_nip;
64 static TCGv cpu_ctr;
65 static TCGv cpu_lr;
66
67 /* dyngen register indexes */
68 static TCGv cpu_T[3];
69 #if defined(TARGET_PPC64)
70 #define cpu_T64 cpu_T
71 #else
72 static TCGv cpu_T64[3];
73 #endif
74 static TCGv cpu_FT[3];
75 static TCGv cpu_AVRh[3], cpu_AVRl[3];
76
77 #include "gen-icount.h"
78
79 void ppc_translate_init(void)
80 {
81 int i;
82 char* p;
83 static int done_init = 0;
84
85 if (done_init)
86 return;
87
88 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
89 #if TARGET_LONG_BITS > HOST_LONG_BITS
90 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
91 TCG_AREG0, offsetof(CPUState, t0), "T0");
92 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
93 TCG_AREG0, offsetof(CPUState, t1), "T1");
94 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
95 TCG_AREG0, offsetof(CPUState, t2), "T2");
96 #else
97 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
98 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
99 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
100 #endif
101 #if !defined(TARGET_PPC64)
102 cpu_T64[0] = tcg_global_mem_new(TCG_TYPE_I64,
103 TCG_AREG0, offsetof(CPUState, t0_64),
104 "T0_64");
105 cpu_T64[1] = tcg_global_mem_new(TCG_TYPE_I64,
106 TCG_AREG0, offsetof(CPUState, t1_64),
107 "T1_64");
108 cpu_T64[2] = tcg_global_mem_new(TCG_TYPE_I64,
109 TCG_AREG0, offsetof(CPUState, t2_64),
110 "T2_64");
111 #endif
112
113 cpu_FT[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
114 offsetof(CPUState, ft0), "FT0");
115 cpu_FT[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
116 offsetof(CPUState, ft1), "FT1");
117 cpu_FT[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
118 offsetof(CPUState, ft2), "FT2");
119
120 cpu_AVRh[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
121 offsetof(CPUState, avr0.u64[0]), "AVR0H");
122 cpu_AVRl[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
123 offsetof(CPUState, avr0.u64[1]), "AVR0L");
124 cpu_AVRh[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
125 offsetof(CPUState, avr1.u64[0]), "AVR1H");
126 cpu_AVRl[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
127 offsetof(CPUState, avr1.u64[1]), "AVR1L");
128 cpu_AVRh[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
129 offsetof(CPUState, avr2.u64[0]), "AVR2H");
130 cpu_AVRl[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
131 offsetof(CPUState, avr2.u64[1]), "AVR2L");
132
133 p = cpu_reg_names;
134
135 for (i = 0; i < 8; i++) {
136 sprintf(p, "crf%d", i);
137 cpu_crf[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
138 offsetof(CPUState, crf[i]), p);
139 p += 5;
140 }
141
142 for (i = 0; i < 32; i++) {
143 sprintf(p, "r%d", i);
144 cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
145 offsetof(CPUState, gpr[i]), p);
146 p += (i < 10) ? 3 : 4;
147 #if !defined(TARGET_PPC64)
148 sprintf(p, "r%dH", i);
149 cpu_gprh[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
150 offsetof(CPUState, gprh[i]), p);
151 p += (i < 10) ? 4 : 5;
152 #endif
153
154 sprintf(p, "fp%d", i);
155 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
156 offsetof(CPUState, fpr[i]), p);
157 p += (i < 10) ? 4 : 5;
158
159 sprintf(p, "avr%dH", i);
160 cpu_avrh[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
161 offsetof(CPUState, avr[i].u64[0]), p);
162 p += (i < 10) ? 6 : 7;
163
164 sprintf(p, "avr%dL", i);
165 cpu_avrl[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
166 offsetof(CPUState, avr[i].u64[1]), p);
167 p += (i < 10) ? 6 : 7;
168 }
169
170 cpu_nip = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
171 offsetof(CPUState, nip), "nip");
172
173 cpu_ctr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
174 offsetof(CPUState, ctr), "ctr");
175
176 cpu_lr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
177 offsetof(CPUState, lr), "lr");
178
179 /* register helpers */
180 #undef DEF_HELPER
181 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
182 #include "helper.h"
183
184 done_init = 1;
185 }
186
187 #if defined(OPTIMIZE_FPRF_UPDATE)
188 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
189 static uint16_t **gen_fprf_ptr;
190 #endif
191
192 /* internal defines */
193 typedef struct DisasContext {
194 struct TranslationBlock *tb;
195 target_ulong nip;
196 uint32_t opcode;
197 uint32_t exception;
198 /* Routine used to access memory */
199 int mem_idx;
200 /* Translation flags */
201 #if !defined(CONFIG_USER_ONLY)
202 int supervisor;
203 #endif
204 #if defined(TARGET_PPC64)
205 int sf_mode;
206 #endif
207 int fpu_enabled;
208 int altivec_enabled;
209 int spe_enabled;
210 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
211 int singlestep_enabled;
212 int dcache_line_size;
213 } DisasContext;
214
215 struct opc_handler_t {
216 /* invalid bits */
217 uint32_t inval;
218 /* instruction type */
219 uint64_t type;
220 /* handler */
221 void (*handler)(DisasContext *ctx);
222 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
223 const unsigned char *oname;
224 #endif
225 #if defined(DO_PPC_STATISTICS)
226 uint64_t count;
227 #endif
228 };
229
230 static always_inline void gen_set_Rc0 (DisasContext *ctx)
231 {
232 #if defined(TARGET_PPC64)
233 if (ctx->sf_mode)
234 gen_op_cmpi_64(0);
235 else
236 #endif
237 gen_op_cmpi(0);
238 gen_op_set_Rc0();
239 }
240
241 static always_inline void gen_reset_fpstatus (void)
242 {
243 #ifdef CONFIG_SOFTFLOAT
244 gen_op_reset_fpstatus();
245 #endif
246 }
247
248 static always_inline void gen_compute_fprf (int set_fprf, int set_rc)
249 {
250 if (set_fprf != 0) {
251 /* This case might be optimized later */
252 #if defined(OPTIMIZE_FPRF_UPDATE)
253 *gen_fprf_ptr++ = gen_opc_ptr;
254 #endif
255 gen_op_compute_fprf(1);
256 if (unlikely(set_rc))
257 tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
258 gen_op_float_check_status();
259 } else if (unlikely(set_rc)) {
260 /* We always need to compute fpcc */
261 gen_op_compute_fprf(0);
262 tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
263 if (set_fprf)
264 gen_op_float_check_status();
265 }
266 }
267
268 static always_inline void gen_optimize_fprf (void)
269 {
270 #if defined(OPTIMIZE_FPRF_UPDATE)
271 uint16_t **ptr;
272
273 for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
274 *ptr = INDEX_op_nop1;
275 gen_fprf_ptr = gen_fprf_buf;
276 #endif
277 }
278
279 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
280 {
281 #if defined(TARGET_PPC64)
282 if (ctx->sf_mode)
283 tcg_gen_movi_tl(cpu_nip, nip);
284 else
285 #endif
286 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
287 }
288
289 #define GEN_EXCP(ctx, excp, error) \
290 do { \
291 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
292 gen_update_nip(ctx, (ctx)->nip); \
293 } \
294 gen_op_raise_exception_err((excp), (error)); \
295 ctx->exception = (excp); \
296 } while (0)
297
298 #define GEN_EXCP_INVAL(ctx) \
299 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
300 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
301
302 #define GEN_EXCP_PRIVOPC(ctx) \
303 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
304 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
305
306 #define GEN_EXCP_PRIVREG(ctx) \
307 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
308 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
309
310 #define GEN_EXCP_NO_FP(ctx) \
311 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
312
313 #define GEN_EXCP_NO_AP(ctx) \
314 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
315
316 #define GEN_EXCP_NO_VR(ctx) \
317 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
318
319 /* Stop translation */
320 static always_inline void GEN_STOP (DisasContext *ctx)
321 {
322 gen_update_nip(ctx, ctx->nip);
323 ctx->exception = POWERPC_EXCP_STOP;
324 }
325
326 /* No need to update nip here, as execution flow will change */
327 static always_inline void GEN_SYNC (DisasContext *ctx)
328 {
329 ctx->exception = POWERPC_EXCP_SYNC;
330 }
331
332 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
333 static void gen_##name (DisasContext *ctx); \
334 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
335 static void gen_##name (DisasContext *ctx)
336
337 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
338 static void gen_##name (DisasContext *ctx); \
339 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
340 static void gen_##name (DisasContext *ctx)
341
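/* Informal illustration (not generated by the build): a use such as
 * GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER) roughly
 * expands to
 *     static void gen_addi (DisasContext *ctx);
 *     OPCODES_SECTION opcode_t opc_addi = { .opc1 = 0x0E, ... };
 *     static void gen_addi (DisasContext *ctx)
 * followed by the handler body written at the invocation site, so a single
 * macro use both registers an opcode_t entry and defines the translator
 * routine (GEN_OPCODE and OPCODES_SECTION are defined further down). */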
342 typedef struct opcode_t {
343 unsigned char opc1, opc2, opc3;
344 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
345 unsigned char pad[5];
346 #else
347 unsigned char pad[1];
348 #endif
349 opc_handler_t handler;
350 const unsigned char *oname;
351 } opcode_t;
352
353 /*****************************************************************************/
354 /*** Instruction decoding ***/
355 #define EXTRACT_HELPER(name, shift, nb) \
356 static always_inline uint32_t name (uint32_t opcode) \
357 { \
358 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
359 }
360
361 #define EXTRACT_SHELPER(name, shift, nb) \
362 static always_inline int32_t name (uint32_t opcode) \
363 { \
364 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
365 }
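/* Example expansions (for illustration only):
 *   EXTRACT_HELPER(rD, 21, 5)    ->  rD(op)   == (op >> 21) & 0x1F
 *   EXTRACT_SHELPER(SIMM, 0, 16) ->  SIMM(op) == (int16_t)(op & 0xFFFF)
 * i.e. the plain helpers return a zero-extended bit field while the SHELPER
 * variants sign-extend a 16-bit field. */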
366
367 /* Opcode part 1 */
368 EXTRACT_HELPER(opc1, 26, 6);
369 /* Opcode part 2 */
370 EXTRACT_HELPER(opc2, 1, 5);
371 /* Opcode part 3 */
372 EXTRACT_HELPER(opc3, 6, 5);
373 /* Update Cr0 flags */
374 EXTRACT_HELPER(Rc, 0, 1);
375 /* Destination */
376 EXTRACT_HELPER(rD, 21, 5);
377 /* Source */
378 EXTRACT_HELPER(rS, 21, 5);
379 /* First operand */
380 EXTRACT_HELPER(rA, 16, 5);
381 /* Second operand */
382 EXTRACT_HELPER(rB, 11, 5);
383 /* Third operand */
384 EXTRACT_HELPER(rC, 6, 5);
385 /*** Get CRn ***/
386 EXTRACT_HELPER(crfD, 23, 3);
387 EXTRACT_HELPER(crfS, 18, 3);
388 EXTRACT_HELPER(crbD, 21, 5);
389 EXTRACT_HELPER(crbA, 16, 5);
390 EXTRACT_HELPER(crbB, 11, 5);
391 /* SPR / TBL */
392 EXTRACT_HELPER(_SPR, 11, 10);
393 static always_inline uint32_t SPR (uint32_t opcode)
394 {
395 uint32_t sprn = _SPR(opcode);
396
397 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
398 }
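/* Illustrative example: mfspr/mtspr encode the architected SPR number with
 * its two 5-bit halves swapped, so an instruction field value of 0x120
 * (binary 01001 00000) is decoded by SPR() back to 9, i.e. the CTR. */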
399 /*** Get constants ***/
400 EXTRACT_HELPER(IMM, 12, 8);
401 /* 16-bit signed immediate value */
402 EXTRACT_SHELPER(SIMM, 0, 16);
403 /* 16-bit unsigned immediate value */
404 EXTRACT_HELPER(UIMM, 0, 16);
405 /* Bit count */
406 EXTRACT_HELPER(NB, 11, 5);
407 /* Shift count */
408 EXTRACT_HELPER(SH, 11, 5);
409 /* Mask start */
410 EXTRACT_HELPER(MB, 6, 5);
411 /* Mask end */
412 EXTRACT_HELPER(ME, 1, 5);
413 /* Trap operand */
414 EXTRACT_HELPER(TO, 21, 5);
415
416 EXTRACT_HELPER(CRM, 12, 8);
417 EXTRACT_HELPER(FM, 17, 8);
418 EXTRACT_HELPER(SR, 16, 4);
419 EXTRACT_HELPER(FPIMM, 12, 4);
420
421 /*** Jump target decoding ***/
422 /* Displacement */
423 EXTRACT_SHELPER(d, 0, 16);
424 /* Immediate address */
425 static always_inline target_ulong LI (uint32_t opcode)
426 {
427 return (opcode >> 0) & 0x03FFFFFC;
428 }
429
430 static always_inline uint32_t BD (uint32_t opcode)
431 {
432 return (opcode >> 0) & 0xFFFC;
433 }
434
435 EXTRACT_HELPER(BO, 21, 5);
436 EXTRACT_HELPER(BI, 16, 5);
437 /* Absolute/relative address */
438 EXTRACT_HELPER(AA, 1, 1);
439 /* Link */
440 EXTRACT_HELPER(LK, 0, 1);
441
442 /* Create a mask between <start> and <end> bits */
443 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
444 {
445 target_ulong ret;
446
447 #if defined(TARGET_PPC64)
448 if (likely(start == 0)) {
449 ret = UINT64_MAX << (63 - end);
450 } else if (likely(end == 63)) {
451 ret = UINT64_MAX >> start;
452 }
453 #else
454 if (likely(start == 0)) {
455 ret = UINT32_MAX << (31 - end);
456 } else if (likely(end == 31)) {
457 ret = UINT32_MAX >> start;
458 }
459 #endif
460 else {
461 ret = (((target_ulong)(-1ULL)) >> (start)) ^
462 (((target_ulong)(-1ULL) >> (end)) >> 1);
463 if (unlikely(start > end))
464 return ~ret;
465 }
466
467 return ret;
468 }
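/* Illustrative values for the 32-bit case (PowerPC numbering, bit 0 = MSB):
 *   MASK(0, 31) == 0xFFFFFFFF
 *   MASK(8, 15) == 0x00FF0000
 *   MASK(16, 7) == 0xFF00FFFF   (start > end yields a wrap-around mask)
 */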
469
470 /*****************************************************************************/
471 /* PowerPC Instructions types definitions */
472 enum {
473 PPC_NONE = 0x0000000000000000ULL,
474 /* PowerPC base instructions set */
475 PPC_INSNS_BASE = 0x0000000000000001ULL,
476 /* integer operations instructions */
477 #define PPC_INTEGER PPC_INSNS_BASE
478 /* flow control instructions */
479 #define PPC_FLOW PPC_INSNS_BASE
480 /* virtual memory instructions */
481 #define PPC_MEM PPC_INSNS_BASE
482 /* ld/st with reservation instructions */
483 #define PPC_RES PPC_INSNS_BASE
484 /* spr/msr access instructions */
485 #define PPC_MISC PPC_INSNS_BASE
486 /* Deprecated instruction sets */
487 /* Original POWER instruction set */
488 PPC_POWER = 0x0000000000000002ULL,
489 /* POWER2 instruction set extension */
490 PPC_POWER2 = 0x0000000000000004ULL,
491 /* Power RTC support */
492 PPC_POWER_RTC = 0x0000000000000008ULL,
493 /* Power-to-PowerPC bridge (601) */
494 PPC_POWER_BR = 0x0000000000000010ULL,
495 /* 64-bit PowerPC instruction set */
496 PPC_64B = 0x0000000000000020ULL,
497 /* New 64-bit extensions (PowerPC 2.0x) */
498 PPC_64BX = 0x0000000000000040ULL,
499 /* 64-bit hypervisor extensions */
500 PPC_64H = 0x0000000000000080ULL,
501 /* New wait instruction (PowerPC 2.0x) */
502 PPC_WAIT = 0x0000000000000100ULL,
503 /* Time base mftb instruction */
504 PPC_MFTB = 0x0000000000000200ULL,
505
506 /* Fixed-point unit extensions */
507 /* PowerPC 602 specific */
508 PPC_602_SPEC = 0x0000000000000400ULL,
509 /* isel instruction */
510 PPC_ISEL = 0x0000000000000800ULL,
511 /* popcntb instruction */
512 PPC_POPCNTB = 0x0000000000001000ULL,
513 /* string load / store */
514 PPC_STRING = 0x0000000000002000ULL,
515
516 /* Floating-point unit extensions */
517 /* Optional floating point instructions */
518 PPC_FLOAT = 0x0000000000010000ULL,
519 /* New floating-point extensions (PowerPC 2.0x) */
520 PPC_FLOAT_EXT = 0x0000000000020000ULL,
521 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
522 PPC_FLOAT_FRES = 0x0000000000080000ULL,
523 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
524 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
525 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
526 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
527
528 /* Vector/SIMD extensions */
529 /* Altivec support */
530 PPC_ALTIVEC = 0x0000000001000000ULL,
531 /* PowerPC 2.03 SPE extension */
532 PPC_SPE = 0x0000000002000000ULL,
533 /* PowerPC 2.03 SPE floating-point extension */
534 PPC_SPEFPU = 0x0000000004000000ULL,
535
536 /* Optional memory control instructions */
537 PPC_MEM_TLBIA = 0x0000000010000000ULL,
538 PPC_MEM_TLBIE = 0x0000000020000000ULL,
539 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
540 /* sync instruction */
541 PPC_MEM_SYNC = 0x0000000080000000ULL,
542 /* eieio instruction */
543 PPC_MEM_EIEIO = 0x0000000100000000ULL,
544
545 /* Cache control instructions */
546 PPC_CACHE = 0x0000000200000000ULL,
547 /* icbi instruction */
548 PPC_CACHE_ICBI = 0x0000000400000000ULL,
549 /* dcbz instruction with fixed cache line size */
550 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
551 /* dcbz instruction with tunable cache line size */
552 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
553 /* dcba instruction */
554 PPC_CACHE_DCBA = 0x0000002000000000ULL,
555 /* Freescale cache locking instructions */
556 PPC_CACHE_LOCK = 0x0000004000000000ULL,
557
558 /* MMU related extensions */
559 /* external control instructions */
560 PPC_EXTERN = 0x0000010000000000ULL,
561 /* segment register access instructions */
562 PPC_SEGMENT = 0x0000020000000000ULL,
563 /* PowerPC 6xx TLB management instructions */
564 PPC_6xx_TLB = 0x0000040000000000ULL,
565 /* PowerPC 74xx TLB management instructions */
566 PPC_74xx_TLB = 0x0000080000000000ULL,
567 /* PowerPC 40x TLB management instructions */
568 PPC_40x_TLB = 0x0000100000000000ULL,
569 /* segment register access instructions for PowerPC 64 "bridge" */
570 PPC_SEGMENT_64B = 0x0000200000000000ULL,
571 /* SLB management */
572 PPC_SLBI = 0x0000400000000000ULL,
573
574 /* Embedded PowerPC dedicated instructions */
575 PPC_WRTEE = 0x0001000000000000ULL,
576 /* PowerPC 40x exception model */
577 PPC_40x_EXCP = 0x0002000000000000ULL,
578 /* PowerPC 405 Mac instructions */
579 PPC_405_MAC = 0x0004000000000000ULL,
580 /* PowerPC 440 specific instructions */
581 PPC_440_SPEC = 0x0008000000000000ULL,
582 /* BookE (embedded) PowerPC specification */
583 PPC_BOOKE = 0x0010000000000000ULL,
584 /* mfapidi instruction */
585 PPC_MFAPIDI = 0x0020000000000000ULL,
586 /* tlbiva instruction */
587 PPC_TLBIVA = 0x0040000000000000ULL,
588 /* tlbivax instruction */
589 PPC_TLBIVAX = 0x0080000000000000ULL,
590 /* PowerPC 4xx dedicated instructions */
591 PPC_4xx_COMMON = 0x0100000000000000ULL,
592 /* PowerPC 40x icbt instructions */
593 PPC_40x_ICBT = 0x0200000000000000ULL,
594 /* rfmci is not implemented in all BookE PowerPC */
595 PPC_RFMCI = 0x0400000000000000ULL,
596 /* rfdi instruction */
597 PPC_RFDI = 0x0800000000000000ULL,
598 /* DCR accesses */
599 PPC_DCR = 0x1000000000000000ULL,
600 /* DCR extended accesses */
601 PPC_DCRX = 0x2000000000000000ULL,
602 /* user-mode DCR access, implemented in PowerPC 460 */
603 PPC_DCRUX = 0x4000000000000000ULL,
604 };
605
606 /*****************************************************************************/
607 /* PowerPC instructions table */
608 #if HOST_LONG_BITS == 64
609 #define OPC_ALIGN 8
610 #else
611 #define OPC_ALIGN 4
612 #endif
613 #if defined(__APPLE__)
614 #define OPCODES_SECTION \
615 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
616 #else
617 #define OPCODES_SECTION \
618 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
619 #endif
620
621 #if defined(DO_PPC_STATISTICS)
622 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
623 OPCODES_SECTION opcode_t opc_##name = { \
624 .opc1 = op1, \
625 .opc2 = op2, \
626 .opc3 = op3, \
627 .pad = { 0, }, \
628 .handler = { \
629 .inval = invl, \
630 .type = _typ, \
631 .handler = &gen_##name, \
632 .oname = stringify(name), \
633 }, \
634 .oname = stringify(name), \
635 }
636 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
637 OPCODES_SECTION opcode_t opc_##name = { \
638 .opc1 = op1, \
639 .opc2 = op2, \
640 .opc3 = op3, \
641 .pad = { 0, }, \
642 .handler = { \
643 .inval = invl, \
644 .type = _typ, \
645 .handler = &gen_##name, \
646 .oname = onam, \
647 }, \
648 .oname = onam, \
649 }
650 #else
651 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
652 OPCODES_SECTION opcode_t opc_##name = { \
653 .opc1 = op1, \
654 .opc2 = op2, \
655 .opc3 = op3, \
656 .pad = { 0, }, \
657 .handler = { \
658 .inval = invl, \
659 .type = _typ, \
660 .handler = &gen_##name, \
661 }, \
662 .oname = stringify(name), \
663 }
664 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
665 OPCODES_SECTION opcode_t opc_##name = { \
666 .opc1 = op1, \
667 .opc2 = op2, \
668 .opc3 = op3, \
669 .pad = { 0, }, \
670 .handler = { \
671 .inval = invl, \
672 .type = _typ, \
673 .handler = &gen_##name, \
674 }, \
675 .oname = onam, \
676 }
677 #endif
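/* Informal note: each GEN_OPCODE/GEN_OPCODE2 use places its opcode_t in the
 * dedicated ".opcodes" (or "__TEXT,__opcodes" on Mac OS X) linker section,
 * aligned to OPC_ALIGN.  The GEN_OPCODE_MARK entries defined just below
 * bracket that section so the per-CPU decode tables can be built at CPU
 * initialisation time by walking everything between the marks. */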
678
679 #define GEN_OPCODE_MARK(name) \
680 OPCODES_SECTION opcode_t opc_##name = { \
681 .opc1 = 0xFF, \
682 .opc2 = 0xFF, \
683 .opc3 = 0xFF, \
684 .pad = { 0, }, \
685 .handler = { \
686 .inval = 0x00000000, \
687 .type = 0x00, \
688 .handler = NULL, \
689 }, \
690 .oname = stringify(name), \
691 }
692
693 /* Start opcode list */
694 GEN_OPCODE_MARK(start);
695
696 /* Invalid instruction */
697 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
698 {
699 GEN_EXCP_INVAL(ctx);
700 }
701
702 static opc_handler_t invalid_handler = {
703 .inval = 0xFFFFFFFF,
704 .type = PPC_NONE,
705 .handler = gen_invalid,
706 };
707
708 /*** Integer arithmetic ***/
709 #define __GEN_INT_ARITH2(name, opc1, opc2, opc3, inval, type) \
710 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
711 { \
712 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
713 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
714 gen_op_##name(); \
715 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
716 if (unlikely(Rc(ctx->opcode) != 0)) \
717 gen_set_Rc0(ctx); \
718 }
719
720 #define __GEN_INT_ARITH2_O(name, opc1, opc2, opc3, inval, type) \
721 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
722 { \
723 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
724 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
725 gen_op_##name(); \
726 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
727 if (unlikely(Rc(ctx->opcode) != 0)) \
728 gen_set_Rc0(ctx); \
729 }
730
731 #define __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
732 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
733 { \
734 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
735 gen_op_##name(); \
736 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
737 if (unlikely(Rc(ctx->opcode) != 0)) \
738 gen_set_Rc0(ctx); \
739 }
740 #define __GEN_INT_ARITH1_O(name, opc1, opc2, opc3, type) \
741 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
742 { \
743 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
744 gen_op_##name(); \
745 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
746 if (unlikely(Rc(ctx->opcode) != 0)) \
747 gen_set_Rc0(ctx); \
748 }
749
750 /* Two operands arithmetic functions */
751 #define GEN_INT_ARITH2(name, opc1, opc2, opc3, type) \
752 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000000, type) \
753 __GEN_INT_ARITH2_O(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
754
755 /* Two operands arithmetic functions with no overflow allowed */
756 #define GEN_INT_ARITHN(name, opc1, opc2, opc3, type) \
757 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000400, type)
758
759 /* One operand arithmetic functions */
760 #define GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
761 __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
762 __GEN_INT_ARITH1_O(name##o, opc1, opc2, opc3 | 0x10, type)
763
764 #if defined(TARGET_PPC64)
765 #define __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, inval, type) \
766 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
767 { \
768 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
769 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
770 if (ctx->sf_mode) \
771 gen_op_##name##_64(); \
772 else \
773 gen_op_##name(); \
774 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
775 if (unlikely(Rc(ctx->opcode) != 0)) \
776 gen_set_Rc0(ctx); \
777 }
778
779 #define __GEN_INT_ARITH2_O_64(name, opc1, opc2, opc3, inval, type) \
780 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
781 { \
782 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
783 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
784 if (ctx->sf_mode) \
785 gen_op_##name##_64(); \
786 else \
787 gen_op_##name(); \
788 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
789 if (unlikely(Rc(ctx->opcode) != 0)) \
790 gen_set_Rc0(ctx); \
791 }
792
793 #define __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
794 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
795 { \
796 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
797 if (ctx->sf_mode) \
798 gen_op_##name##_64(); \
799 else \
800 gen_op_##name(); \
801 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
802 if (unlikely(Rc(ctx->opcode) != 0)) \
803 gen_set_Rc0(ctx); \
804 }
805 #define __GEN_INT_ARITH1_O_64(name, opc1, opc2, opc3, type) \
806 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
807 { \
808 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
809 if (ctx->sf_mode) \
810 gen_op_##name##_64(); \
811 else \
812 gen_op_##name(); \
813 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
814 if (unlikely(Rc(ctx->opcode) != 0)) \
815 gen_set_Rc0(ctx); \
816 }
817
818 /* Two operands arithmetic functions */
819 #define GEN_INT_ARITH2_64(name, opc1, opc2, opc3, type) \
820 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000000, type) \
821 __GEN_INT_ARITH2_O_64(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
822
823 /* Two operands arithmetic functions with no overflow allowed */
824 #define GEN_INT_ARITHN_64(name, opc1, opc2, opc3, type) \
825 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000400, type)
826
827 /* One operand arithmetic functions */
828 #define GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
829 __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
830 __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type)
831 #else
832 #define GEN_INT_ARITH2_64 GEN_INT_ARITH2
833 #define GEN_INT_ARITHN_64 GEN_INT_ARITHN
834 #define GEN_INT_ARITH1_64 GEN_INT_ARITH1
835 #endif
836
837 /* add add. addo addo. */
838 static always_inline void gen_op_add (void)
839 {
840 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
841 }
842 static always_inline void gen_op_addo (void)
843 {
844 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
845 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
846 gen_op_check_addo();
847 }
848 #if defined(TARGET_PPC64)
849 #define gen_op_add_64 gen_op_add
850 static always_inline void gen_op_addo_64 (void)
851 {
852 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
853 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
854 gen_op_check_addo_64();
855 }
856 #endif
857 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER);
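/* Informal note on the pattern above: the overflow/carry variants first copy
 * the original rA value into T2 (or its complement for the subtract forms
 * below) before computing the result, presumably so that the out-of-line
 * gen_op_check_addo()/gen_op_check_addc() helpers can derive XER[OV] and
 * XER[CA] from the saved operand and the new value in T0. */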
858 /* addc addc. addco addco. */
859 static always_inline void gen_op_addc (void)
860 {
861 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
862 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
863 gen_op_check_addc();
864 }
865 static always_inline void gen_op_addco (void)
866 {
867 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
868 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
869 gen_op_check_addc();
870 gen_op_check_addo();
871 }
872 #if defined(TARGET_PPC64)
873 static always_inline void gen_op_addc_64 (void)
874 {
875 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
876 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
877 gen_op_check_addc_64();
878 }
879 static always_inline void gen_op_addco_64 (void)
880 {
881 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
882 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
883 gen_op_check_addc_64();
884 gen_op_check_addo_64();
885 }
886 #endif
887 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER);
888 /* adde adde. addeo addeo. */
889 static always_inline void gen_op_addeo (void)
890 {
891 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
892 gen_op_adde();
893 gen_op_check_addo();
894 }
895 #if defined(TARGET_PPC64)
896 static always_inline void gen_op_addeo_64 (void)
897 {
898 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
899 gen_op_adde_64();
900 gen_op_check_addo_64();
901 }
902 #endif
903 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER);
904 /* addme addme. addmeo addmeo. */
905 static always_inline void gen_op_addme (void)
906 {
907 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
908 gen_op_add_me();
909 }
910 #if defined(TARGET_PPC64)
911 static always_inline void gen_op_addme_64 (void)
912 {
913 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
914 gen_op_add_me_64();
915 }
916 #endif
917 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER);
918 /* addze addze. addzeo addzeo. */
919 static always_inline void gen_op_addze (void)
920 {
921 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
922 gen_op_add_ze();
923 gen_op_check_addc();
924 }
925 static always_inline void gen_op_addzeo (void)
926 {
927 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
928 gen_op_add_ze();
929 gen_op_check_addc();
930 gen_op_check_addo();
931 }
932 #if defined(TARGET_PPC64)
933 static always_inline void gen_op_addze_64 (void)
934 {
935 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
936 gen_op_add_ze();
937 gen_op_check_addc_64();
938 }
939 static always_inline void gen_op_addzeo_64 (void)
940 {
941 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
942 gen_op_add_ze();
943 gen_op_check_addc_64();
944 gen_op_check_addo_64();
945 }
946 #endif
947 GEN_INT_ARITH1_64 (addze, 0x1F, 0x0A, 0x06, PPC_INTEGER);
948 /* divw divw. divwo divwo. */
949 GEN_INT_ARITH2 (divw, 0x1F, 0x0B, 0x0F, PPC_INTEGER);
950 /* divwu divwu. divwuo divwuo. */
951 GEN_INT_ARITH2 (divwu, 0x1F, 0x0B, 0x0E, PPC_INTEGER);
952 /* mulhw mulhw. */
953 GEN_INT_ARITHN (mulhw, 0x1F, 0x0B, 0x02, PPC_INTEGER);
954 /* mulhwu mulhwu. */
955 GEN_INT_ARITHN (mulhwu, 0x1F, 0x0B, 0x00, PPC_INTEGER);
956 /* mullw mullw. mullwo mullwo. */
957 GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER);
958 /* neg neg. nego nego. */
959 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER);
960 /* subf subf. subfo subfo. */
961 static always_inline void gen_op_subf (void)
962 {
963 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
964 }
965 static always_inline void gen_op_subfo (void)
966 {
967 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
968 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
969 gen_op_check_addo();
970 }
971 #if defined(TARGET_PPC64)
972 #define gen_op_subf_64 gen_op_subf
973 static always_inline void gen_op_subfo_64 (void)
974 {
975 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
976 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
977 gen_op_check_addo_64();
978 }
979 #endif
980 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER);
981 /* subfc subfc. subfco subfco. */
982 static always_inline void gen_op_subfc (void)
983 {
984 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
985 gen_op_check_subfc();
986 }
987 static always_inline void gen_op_subfco (void)
988 {
989 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
990 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
991 gen_op_check_subfc();
992 gen_op_check_addo();
993 }
994 #if defined(TARGET_PPC64)
995 static always_inline void gen_op_subfc_64 (void)
996 {
997 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
998 gen_op_check_subfc_64();
999 }
1000 static always_inline void gen_op_subfco_64 (void)
1001 {
1002 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1003 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
1004 gen_op_check_subfc_64();
1005 gen_op_check_addo_64();
1006 }
1007 #endif
1008 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER);
1009 /* subfe subfe. subfeo subfeo. */
1010 static always_inline void gen_op_subfeo (void)
1011 {
1012 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
1013 gen_op_subfe();
1014 gen_op_check_addo();
1015 }
1016 #if defined(TARGET_PPC64)
1017 #define gen_op_subfe_64 gen_op_subfe
1018 static always_inline void gen_op_subfeo_64 (void)
1019 {
1020 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1021 gen_op_subfe_64();
1022 gen_op_check_addo_64();
1023 }
1024 #endif
1025 GEN_INT_ARITH2_64 (subfe, 0x1F, 0x08, 0x04, PPC_INTEGER);
1026 /* subfme subfme. subfmeo subfmeo. */
1027 GEN_INT_ARITH1_64 (subfme, 0x1F, 0x08, 0x07, PPC_INTEGER);
1028 /* subfze subfze. subfzeo subfzeo. */
1029 GEN_INT_ARITH1_64 (subfze, 0x1F, 0x08, 0x06, PPC_INTEGER);
1030 /* addi */
1031 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1032 {
1033 target_long simm = SIMM(ctx->opcode);
1034
1035 if (rA(ctx->opcode) == 0) {
1036 /* li case */
1037 tcg_gen_movi_tl(cpu_T[0], simm);
1038 } else {
1039 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1040 if (likely(simm != 0))
1041 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1042 }
1043 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1044 }
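/* Illustrative encodings handled by the addi case above:
 *   addi r3,0,100   ->  assembler "li r3,100"  (rA == 0 loads the immediate)
 *   addi r3,r4,0    ->  plain register copy    (simm == 0 skips the add)
 */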
1045 /* addic */
1046 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1047 {
1048 target_long simm = SIMM(ctx->opcode);
1049
1050 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1051 if (likely(simm != 0)) {
1052 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1053 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1054 #if defined(TARGET_PPC64)
1055 if (ctx->sf_mode)
1056 gen_op_check_addc_64();
1057 else
1058 #endif
1059 gen_op_check_addc();
1060 } else {
1061 gen_op_clear_xer_ca();
1062 }
1063 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1064 }
1065 /* addic. */
1066 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1067 {
1068 target_long simm = SIMM(ctx->opcode);
1069
1070 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1071 if (likely(simm != 0)) {
1072 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1073 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1074 #if defined(TARGET_PPC64)
1075 if (ctx->sf_mode)
1076 gen_op_check_addc_64();
1077 else
1078 #endif
1079 gen_op_check_addc();
1080 } else {
1081 gen_op_clear_xer_ca();
1082 }
1083 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1084 gen_set_Rc0(ctx);
1085 }
1086 /* addis */
1087 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1088 {
1089 target_long simm = SIMM(ctx->opcode);
1090
1091 if (rA(ctx->opcode) == 0) {
1092 /* lis case */
1093 tcg_gen_movi_tl(cpu_T[0], simm << 16);
1094 } else {
1095 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1096 if (likely(simm != 0))
1097 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm << 16);
1098 }
1099 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1100 }
1101 /* mulli */
1102 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1103 {
1104 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1105 gen_op_mulli(SIMM(ctx->opcode));
1106 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1107 }
1108 /* subfic */
1109 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1110 {
1111 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1112 #if defined(TARGET_PPC64)
1113 if (ctx->sf_mode)
1114 gen_op_subfic_64(SIMM(ctx->opcode));
1115 else
1116 #endif
1117 gen_op_subfic(SIMM(ctx->opcode));
1118 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1119 }
1120
1121 #if defined(TARGET_PPC64)
1122 /* mulhd mulhd. */
1123 GEN_INT_ARITHN (mulhd, 0x1F, 0x09, 0x02, PPC_64B);
1124 /* mulhdu mulhdu. */
1125 GEN_INT_ARITHN (mulhdu, 0x1F, 0x09, 0x00, PPC_64B);
1126 /* mulld mulld. mulldo mulldo. */
1127 GEN_INT_ARITH2 (mulld, 0x1F, 0x09, 0x07, PPC_64B);
1128 /* divd divd. divdo divdo. */
1129 GEN_INT_ARITH2 (divd, 0x1F, 0x09, 0x0F, PPC_64B);
1130 /* divdu divdu. divduo divduo. */
1131 GEN_INT_ARITH2 (divdu, 0x1F, 0x09, 0x0E, PPC_64B);
1132 #endif
1133
1134 /*** Integer comparison ***/
1135 #if defined(TARGET_PPC64)
1136 #define GEN_CMP(name, opc, type) \
1137 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1138 { \
1139 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1140 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1141 if (ctx->sf_mode && (ctx->opcode & 0x00200000)) \
1142 gen_op_##name##_64(); \
1143 else \
1144 gen_op_##name(); \
1145 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
1146 }
1147 #else
1148 #define GEN_CMP(name, opc, type) \
1149 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1150 { \
1151 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1152 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1153 gen_op_##name(); \
1154 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
1155 }
1156 #endif
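/* Example (informal): for "cmpw cr3,r4,r5" the L bit (mask 0x00200000) is
 * clear, so even in 64-bit mode the 32-bit gen_op_cmp() path is taken and
 * the 4-bit LT/GT/EQ/SO result lands in crf[3] only. */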
1157
1158 /* cmp */
1159 GEN_CMP(cmp, 0x00, PPC_INTEGER);
1160 /* cmpi */
1161 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1162 {
1163 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1164 #if defined(TARGET_PPC64)
1165 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1166 gen_op_cmpi_64(SIMM(ctx->opcode));
1167 else
1168 #endif
1169 gen_op_cmpi(SIMM(ctx->opcode));
1170 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1171 }
1172 /* cmpl */
1173 GEN_CMP(cmpl, 0x01, PPC_INTEGER);
1174 /* cmpli */
1175 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1176 {
1177 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1178 #if defined(TARGET_PPC64)
1179 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1180 gen_op_cmpli_64(UIMM(ctx->opcode));
1181 else
1182 #endif
1183 gen_op_cmpli(UIMM(ctx->opcode));
1184 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1185 }
1186
1187 /* isel (PowerPC 2.03 specification) */
1188 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
1189 {
1190 uint32_t bi = rC(ctx->opcode);
1191 uint32_t mask;
1192
1193 if (rA(ctx->opcode) == 0) {
1194 tcg_gen_movi_tl(cpu_T[0], 0);
1195 } else {
1196 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1197 }
1198 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
1199 mask = 1 << (3 - (bi & 0x03));
1200 tcg_gen_mov_i32(cpu_T[0], cpu_crf[bi >> 2]);
1201 gen_op_test_true(mask);
1202 gen_op_isel();
1203 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1204 }
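/* Illustrative use: "isel r3,r4,r5,2" tests CR bit 2 (CR0[EQ]) and copies r4
 * into r3 when it is set, r5 otherwise; with rA == 0 the constant zero is
 * selected instead of a register, as handled above. */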
1205
1206 /*** Integer logical ***/
1207 #define __GEN_LOGICAL2(name, opc2, opc3, type) \
1208 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000000, type) \
1209 { \
1210 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1211 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1212 gen_op_##name(); \
1213 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1214 if (unlikely(Rc(ctx->opcode) != 0)) \
1215 gen_set_Rc0(ctx); \
1216 }
1217 #define GEN_LOGICAL2(name, opc, type) \
1218 __GEN_LOGICAL2(name, 0x1C, opc, type)
1219
1220 #define GEN_LOGICAL1(name, opc, type) \
1221 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1222 { \
1223 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1224 gen_op_##name(); \
1225 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1226 if (unlikely(Rc(ctx->opcode) != 0)) \
1227 gen_set_Rc0(ctx); \
1228 }
1229
1230 /* and & and. */
1231 GEN_LOGICAL2(and, 0x00, PPC_INTEGER);
1232 /* andc & andc. */
1233 GEN_LOGICAL2(andc, 0x01, PPC_INTEGER);
1234 /* andi. */
1235 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1236 {
1237 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1238 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], UIMM(ctx->opcode));
1239 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1240 gen_set_Rc0(ctx);
1241 }
1242 /* andis. */
1243 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1244 {
1245 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1246 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], UIMM(ctx->opcode) << 16);
1247 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1248 gen_set_Rc0(ctx);
1249 }
1250
1251 /* cntlzw */
1252 GEN_LOGICAL1(cntlzw, 0x00, PPC_INTEGER);
1253 /* eqv & eqv. */
1254 GEN_LOGICAL2(eqv, 0x08, PPC_INTEGER);
1255 /* extsb & extsb. */
1256 GEN_LOGICAL1(extsb, 0x1D, PPC_INTEGER);
1257 /* extsh & extsh. */
1258 GEN_LOGICAL1(extsh, 0x1C, PPC_INTEGER);
1259 /* nand & nand. */
1260 GEN_LOGICAL2(nand, 0x0E, PPC_INTEGER);
1261 /* nor & nor. */
1262 GEN_LOGICAL2(nor, 0x03, PPC_INTEGER);
1263
1264 /* or & or. */
1265 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1266 {
1267 int rs, ra, rb;
1268
1269 rs = rS(ctx->opcode);
1270 ra = rA(ctx->opcode);
1271 rb = rB(ctx->opcode);
1272 /* Optimisation for mr. ri case */
1273 if (rs != ra || rs != rb) {
1274 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1275 if (rs != rb) {
1276 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
1277 gen_op_or();
1278 }
1279 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
1280 if (unlikely(Rc(ctx->opcode) != 0))
1281 gen_set_Rc0(ctx);
1282 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1283 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1284 gen_set_Rc0(ctx);
1285 #if defined(TARGET_PPC64)
1286 } else {
1287 switch (rs) {
1288 case 1:
1289 /* Set process priority to low */
1290 gen_op_store_pri(2);
1291 break;
1292 case 6:
1293 /* Set process priority to medium-low */
1294 gen_op_store_pri(3);
1295 break;
1296 case 2:
1297 /* Set process priority to normal */
1298 gen_op_store_pri(4);
1299 break;
1300 #if !defined(CONFIG_USER_ONLY)
1301 case 31:
1302 if (ctx->supervisor > 0) {
1303 /* Set process priority to very low */
1304 gen_op_store_pri(1);
1305 }
1306 break;
1307 case 5:
1308 if (ctx->supervisor > 0) {
1309 /* Set process priority to medium-high */
1310 gen_op_store_pri(5);
1311 }
1312 break;
1313 case 3:
1314 if (ctx->supervisor > 0) {
1315 /* Set process priority to high */
1316 gen_op_store_pri(6);
1317 }
1318 break;
1319 case 7:
1320 if (ctx->supervisor > 1) {
1321 /* Set process priority to very high */
1322 gen_op_store_pri(7);
1323 }
1324 break;
1325 #endif
1326 default:
1327 /* nop */
1328 break;
1329 }
1330 #endif
1331 }
1332 }
1333
1334 /* orc & orc. */
1335 GEN_LOGICAL2(orc, 0x0C, PPC_INTEGER);
1336 /* xor & xor. */
1337 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1338 {
1339 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1340 /* Optimisation for "set to zero" case */
1341 if (rS(ctx->opcode) != rB(ctx->opcode)) {
1342 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1343 gen_op_xor();
1344 } else {
1345 tcg_gen_movi_tl(cpu_T[0], 0);
1346 }
1347 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1348 if (unlikely(Rc(ctx->opcode) != 0))
1349 gen_set_Rc0(ctx);
1350 }
1351 /* ori */
1352 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1353 {
1354 target_ulong uimm = UIMM(ctx->opcode);
1355
1356 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1357 /* NOP */
1358 /* XXX: should handle special NOPs for POWER series */
1359 return;
1360 }
1361 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1362 if (likely(uimm != 0))
1363 gen_op_ori(uimm);
1364 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1365 }
1366 /* oris */
1367 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1368 {
1369 target_ulong uimm = UIMM(ctx->opcode);
1370
1371 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1372 /* NOP */
1373 return;
1374 }
1375 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1376 if (likely(uimm != 0))
1377 gen_op_ori(uimm << 16);
1378 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1379 }
1380 /* xori */
1381 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1382 {
1383 target_ulong uimm = UIMM(ctx->opcode);
1384
1385 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1386 /* NOP */
1387 return;
1388 }
1389 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1390 if (likely(uimm != 0))
1391 gen_op_xori(uimm);
1392 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1393 }
1394
1395 /* xoris */
1396 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1397 {
1398 target_ulong uimm = UIMM(ctx->opcode);
1399
1400 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1401 /* NOP */
1402 return;
1403 }
1404 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1405 if (likely(uimm != 0))
1406 gen_op_xori(uimm << 16);
1407 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1408 }
1409
1410 /* popcntb : PowerPC 2.03 specification */
1411 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1412 {
1413 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1414 #if defined(TARGET_PPC64)
1415 if (ctx->sf_mode)
1416 gen_op_popcntb_64();
1417 else
1418 #endif
1419 gen_op_popcntb();
1420 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1421 }
1422
1423 #if defined(TARGET_PPC64)
1424 /* extsw & extsw. */
1425 GEN_LOGICAL1(extsw, 0x1E, PPC_64B);
1426 /* cntlzd */
1427 GEN_LOGICAL1(cntlzd, 0x01, PPC_64B);
1428 #endif
1429
1430 /*** Integer rotate ***/
1431 /* rlwimi & rlwimi. */
1432 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1433 {
1434 target_ulong mask;
1435 uint32_t mb, me, sh;
1436
1437 mb = MB(ctx->opcode);
1438 me = ME(ctx->opcode);
1439 sh = SH(ctx->opcode);
1440 if (likely(sh == 0)) {
1441 if (likely(mb == 0 && me == 31)) {
1442 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1443 goto do_store;
1444 } else if (likely(mb == 31 && me == 0)) {
1445 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1446 goto do_store;
1447 }
1448 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1449 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1450 goto do_mask;
1451 }
1452 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1453 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1454 gen_op_rotli32_T0(SH(ctx->opcode));
1455 do_mask:
1456 #if defined(TARGET_PPC64)
1457 mb += 32;
1458 me += 32;
1459 #endif
1460 mask = MASK(mb, me);
1461 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], mask);
1462 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], ~mask);
1463 gen_op_or();
1464 do_store:
1465 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1466 if (unlikely(Rc(ctx->opcode) != 0))
1467 gen_set_Rc0(ctx);
1468 }
1469 /* rlwinm & rlwinm. */
1470 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1471 {
1472 uint32_t mb, me, sh;
1473
1474 sh = SH(ctx->opcode);
1475 mb = MB(ctx->opcode);
1476 me = ME(ctx->opcode);
1477 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1478 if (likely(sh == 0)) {
1479 goto do_mask;
1480 }
1481 if (likely(mb == 0)) {
1482 if (likely(me == 31)) {
1483 gen_op_rotli32_T0(sh);
1484 goto do_store;
1485 } else if (likely(me == (31 - sh))) {
1486 gen_op_sli_T0(sh);
1487 goto do_store;
1488 }
1489 } else if (likely(me == 31)) {
1490 if (likely(sh == (32 - mb))) {
1491 gen_op_srli_T0(mb);
1492 goto do_store;
1493 }
1494 }
1495 gen_op_rotli32_T0(sh);
1496 do_mask:
1497 #if defined(TARGET_PPC64)
1498 mb += 32;
1499 me += 32;
1500 #endif
1501 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1502 do_store:
1503 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1504 if (unlikely(Rc(ctx->opcode) != 0))
1505 gen_set_Rc0(ctx);
1506 }
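/* Common rlwinm idioms and the fast paths they take above (illustrative):
 *   slwi  rA,rS,n  ==  rlwinm rA,rS,n,0,31-n   ->  gen_op_sli_T0(n)
 *   srwi  rA,rS,n  ==  rlwinm rA,rS,32-n,n,31  ->  gen_op_srli_T0(n)
 *   clrlwi rA,rS,n ==  rlwinm rA,rS,0,n,31     ->  mask-only path
 */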
1507 /* rlwnm & rlwnm. */
1508 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1509 {
1510 uint32_t mb, me;
1511
1512 mb = MB(ctx->opcode);
1513 me = ME(ctx->opcode);
1514 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1515 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1516 gen_op_rotl32_T0_T1();
1517 if (unlikely(mb != 0 || me != 31)) {
1518 #if defined(TARGET_PPC64)
1519 mb += 32;
1520 me += 32;
1521 #endif
1522 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1523 }
1524 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1525 if (unlikely(Rc(ctx->opcode) != 0))
1526 gen_set_Rc0(ctx);
1527 }
1528
1529 #if defined(TARGET_PPC64)
1530 #define GEN_PPC64_R2(name, opc1, opc2) \
1531 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1532 { \
1533 gen_##name(ctx, 0); \
1534 } \
1535 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1536 PPC_64B) \
1537 { \
1538 gen_##name(ctx, 1); \
1539 }
1540 #define GEN_PPC64_R4(name, opc1, opc2) \
1541 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1542 { \
1543 gen_##name(ctx, 0, 0); \
1544 } \
1545 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1546 PPC_64B) \
1547 { \
1548 gen_##name(ctx, 0, 1); \
1549 } \
1550 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1551 PPC_64B) \
1552 { \
1553 gen_##name(ctx, 1, 0); \
1554 } \
1555 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1556 PPC_64B) \
1557 { \
1558 gen_##name(ctx, 1, 1); \
1559 }
1560
1561 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1562 uint32_t me, uint32_t sh)
1563 {
1564 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1565 if (likely(sh == 0)) {
1566 goto do_mask;
1567 }
1568 if (likely(mb == 0)) {
1569 if (likely(me == 63)) {
1570 gen_op_rotli64_T0(sh);
1571 goto do_store;
1572 } else if (likely(me == (63 - sh))) {
1573 gen_op_sli_T0(sh);
1574 goto do_store;
1575 }
1576 } else if (likely(me == 63)) {
1577 if (likely(sh == (64 - mb))) {
1578 gen_op_srli_T0_64(mb);
1579 goto do_store;
1580 }
1581 }
1582 gen_op_rotli64_T0(sh);
1583 do_mask:
1584 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1585 do_store:
1586 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1587 if (unlikely(Rc(ctx->opcode) != 0))
1588 gen_set_Rc0(ctx);
1589 }
1590 /* rldicl - rldicl. */
1591 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1592 {
1593 uint32_t sh, mb;
1594
1595 sh = SH(ctx->opcode) | (shn << 5);
1596 mb = MB(ctx->opcode) | (mbn << 5);
1597 gen_rldinm(ctx, mb, 63, sh);
1598 }
1599 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1600 /* rldicr - rldicr. */
1601 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1602 {
1603 uint32_t sh, me;
1604
1605 sh = SH(ctx->opcode) | (shn << 5);
1606 me = MB(ctx->opcode) | (men << 5);
1607 gen_rldinm(ctx, 0, me, sh);
1608 }
1609 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1610 /* rldic - rldic. */
1611 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1612 {
1613 uint32_t sh, mb;
1614
1615 sh = SH(ctx->opcode) | (shn << 5);
1616 mb = MB(ctx->opcode) | (mbn << 5);
1617 gen_rldinm(ctx, mb, 63 - sh, sh);
1618 }
1619 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1620
1621 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1622 uint32_t me)
1623 {
1624 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1625 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1626 gen_op_rotl64_T0_T1();
1627 if (unlikely(mb != 0 || me != 63)) {
1628 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1629 }
1630 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1631 if (unlikely(Rc(ctx->opcode) != 0))
1632 gen_set_Rc0(ctx);
1633 }
1634
1635 /* rldcl - rldcl. */
1636 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1637 {
1638 uint32_t mb;
1639
1640 mb = MB(ctx->opcode) | (mbn << 5);
1641 gen_rldnm(ctx, mb, 63);
1642 }
1643 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1644 /* rldcr - rldcr. */
1645 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1646 {
1647 uint32_t me;
1648
1649 me = MB(ctx->opcode) | (men << 5);
1650 gen_rldnm(ctx, 0, me);
1651 }
1652 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1653 /* rldimi - rldimi. */
1654 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1655 {
1656 uint64_t mask;
1657 uint32_t sh, mb, me;
1658
1659 sh = SH(ctx->opcode) | (shn << 5);
1660 mb = MB(ctx->opcode) | (mbn << 5);
1661 me = 63 - sh;
1662 if (likely(sh == 0)) {
1663 if (likely(mb == 0)) {
1664 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1665 goto do_store;
1666 }
1667 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1668 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1669 goto do_mask;
1670 }
1671 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1672 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1673 gen_op_rotli64_T0(sh);
1674 do_mask:
1675 mask = MASK(mb, me);
1676 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], mask);
1677 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], ~mask);
1678 gen_op_or();
1679 do_store:
1680 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1681 if (unlikely(Rc(ctx->opcode) != 0))
1682 gen_set_Rc0(ctx);
1683 }
1684 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1685 #endif
1686
1687 /*** Integer shift ***/
1688 /* slw & slw. */
1689 __GEN_LOGICAL2(slw, 0x18, 0x00, PPC_INTEGER);
1690 /* sraw & sraw. */
1691 __GEN_LOGICAL2(sraw, 0x18, 0x18, PPC_INTEGER);
1692 /* srawi & srawi. */
1693 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1694 {
1695 int mb, me;
1696 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1697 if (SH(ctx->opcode) != 0) {
1698 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1699 mb = 32 - SH(ctx->opcode);
1700 me = 31;
1701 #if defined(TARGET_PPC64)
1702 mb += 32;
1703 me += 32;
1704 #endif
1705 gen_op_srawi(SH(ctx->opcode), MASK(mb, me));
1706 }
1707 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1708 if (unlikely(Rc(ctx->opcode) != 0))
1709 gen_set_Rc0(ctx);
1710 }
1711 /* srw & srw. */
1712 __GEN_LOGICAL2(srw, 0x18, 0x10, PPC_INTEGER);
1713
1714 #if defined(TARGET_PPC64)
1715 /* sld & sld. */
1716 __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
1717 /* srad & srad. */
1718 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
1719 /* sradi & sradi. */
1720 static always_inline void gen_sradi (DisasContext *ctx, int n)
1721 {
1722 uint64_t mask;
1723 int sh, mb, me;
1724
1725 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1726 sh = SH(ctx->opcode) + (n << 5);
1727 if (sh != 0) {
1728 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1729 mb = 64 - SH(ctx->opcode);
1730 me = 63;
1731 mask = MASK(mb, me);
1732 gen_op_sradi(sh, mask >> 32, mask);
1733 }
1734 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1735 if (unlikely(Rc(ctx->opcode) != 0))
1736 gen_set_Rc0(ctx);
1737 }
1738 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
1739 {
1740 gen_sradi(ctx, 0);
1741 }
1742 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
1743 {
1744 gen_sradi(ctx, 1);
1745 }
1746 /* srd & srd. */
1747 __GEN_LOGICAL2(srd, 0x1B, 0x10, PPC_64B);
1748 #endif
1749
1750 /*** Floating-Point arithmetic ***/
1751 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1752 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
1753 { \
1754 if (unlikely(!ctx->fpu_enabled)) { \
1755 GEN_EXCP_NO_FP(ctx); \
1756 return; \
1757 } \
1758 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1759 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1760 tcg_gen_mov_i64(cpu_FT[2], cpu_fpr[rB(ctx->opcode)]); \
1761 gen_reset_fpstatus(); \
1762 gen_op_f##op(); \
1763 if (isfloat) { \
1764 gen_op_frsp(); \
1765 } \
1766 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1767 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1768 }
1769
1770 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1771 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1772 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
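/* Informal note: GEN_FLOAT_ACB (and the similar AB/AC wrappers below)
 * instantiates a double-precision handler under primary opcode 0x3F and a
 * single-precision one under 0x3B; the "s" form differs only in the extra
 * gen_op_frsp() rounding step.  For example GEN_FLOAT_ACB(madd, 0x1D, 1,
 * PPC_FLOAT) yields gen_fmadd and gen_fmadds. */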
1773
1774 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1775 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1776 { \
1777 if (unlikely(!ctx->fpu_enabled)) { \
1778 GEN_EXCP_NO_FP(ctx); \
1779 return; \
1780 } \
1781 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1782 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]); \
1783 gen_reset_fpstatus(); \
1784 gen_op_f##op(); \
1785 if (isfloat) { \
1786 gen_op_frsp(); \
1787 } \
1788 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1789 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1790 }
1791 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
1792 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1793 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1794
1795 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1796 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1797 { \
1798 if (unlikely(!ctx->fpu_enabled)) { \
1799 GEN_EXCP_NO_FP(ctx); \
1800 return; \
1801 } \
1802 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1803 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1804 gen_reset_fpstatus(); \
1805 gen_op_f##op(); \
1806 if (isfloat) { \
1807 gen_op_frsp(); \
1808 } \
1809 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1810 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1811 }
1812 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
1813 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1814 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1815
1816 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
1817 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
1818 { \
1819 if (unlikely(!ctx->fpu_enabled)) { \
1820 GEN_EXCP_NO_FP(ctx); \
1821 return; \
1822 } \
1823 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1824 gen_reset_fpstatus(); \
1825 gen_op_f##name(); \
1826 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1827 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1828 }
1829
1830 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
1831 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
1832 { \
1833 if (unlikely(!ctx->fpu_enabled)) { \
1834 GEN_EXCP_NO_FP(ctx); \
1835 return; \
1836 } \
1837 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1838 gen_reset_fpstatus(); \
1839 gen_op_f##name(); \
1840 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1841 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1842 }
1843
1844 /* fadd - fadds */
1845 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
1846 /* fdiv - fdivs */
1847 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
1848 /* fmul - fmuls */
1849 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
1850
1851 /* fre */
1852 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
1853
1854 /* fres */
1855 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
1856
1857 /* frsqrte */
1858 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
1859
1860 /* frsqrtes */
1861 static always_inline void gen_op_frsqrtes (void)
1862 {
1863 gen_op_frsqrte();
1864 gen_op_frsp();
1865 }
1866 GEN_FLOAT_BS(rsqrtes, 0x3B, 0x1A, 1, PPC_FLOAT_FRSQRTES);
1867
1868 /* fsel */
1869 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
1870 /* fsub - fsubs */
1871 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
1872 /* Optional: */
1873 /* fsqrt */
1874 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1875 {
1876 if (unlikely(!ctx->fpu_enabled)) {
1877 GEN_EXCP_NO_FP(ctx);
1878 return;
1879 }
1880 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1881 gen_reset_fpstatus();
1882 gen_op_fsqrt();
1883 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1884 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1885 }
1886
1887 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1888 {
1889 if (unlikely(!ctx->fpu_enabled)) {
1890 GEN_EXCP_NO_FP(ctx);
1891 return;
1892 }
1893 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1894 gen_reset_fpstatus();
1895 gen_op_fsqrt();
1896 gen_op_frsp();
1897 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1898 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1899 }
1900
1901 /*** Floating-Point multiply-and-add ***/
1902 /* fmadd - fmadds */
1903 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
1904 /* fmsub - fmsubs */
1905 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
1906 /* fnmadd - fnmadds */
1907 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
1908 /* fnmsub - fnmsubs */
1909 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
1910
1911 /*** Floating-Point round & convert ***/
1912 /* fctiw */
1913 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
1914 /* fctiwz */
1915 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
1916 /* frsp */
1917 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
1918 #if defined(TARGET_PPC64)
1919 /* fcfid */
1920 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
1921 /* fctid */
1922 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
1923 /* fctidz */
1924 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
1925 #endif
1926
1927 /* frin */
1928 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
1929 /* friz */
1930 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
1931 /* frip */
1932 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
1933 /* frim */
1934 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
1935
1936 /*** Floating-Point compare ***/
1937 /* fcmpo */
1938 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
1939 {
1940 if (unlikely(!ctx->fpu_enabled)) {
1941 GEN_EXCP_NO_FP(ctx);
1942 return;
1943 }
1944 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1945 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1946 gen_reset_fpstatus();
1947 gen_op_fcmpo();
1948 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1949 gen_op_float_check_status();
1950 }
1951
1952 /* fcmpu */
1953 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
1954 {
1955 if (unlikely(!ctx->fpu_enabled)) {
1956 GEN_EXCP_NO_FP(ctx);
1957 return;
1958 }
1959 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1960 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1961 gen_reset_fpstatus();
1962 gen_op_fcmpu();
1963 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1964 gen_op_float_check_status();
1965 }
1966
1967 /*** Floating-point move ***/
1968 /* fabs */
1969 /* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
1970 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
1971
1972 /* fmr - fmr. */
1973 /* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
1974 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
1975 {
1976 if (unlikely(!ctx->fpu_enabled)) {
1977 GEN_EXCP_NO_FP(ctx);
1978 return;
1979 }
1980 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1981 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1982 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
1983 }
1984
1985 /* fnabs */
1986 /* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
1987 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
1988 /* fneg */
1989 /* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
1990 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
1991
1992 /*** Floating-Point status & ctrl register ***/
1993 /* mcrfs */
1994 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
1995 {
1996 int bfa;
1997
1998 if (unlikely(!ctx->fpu_enabled)) {
1999 GEN_EXCP_NO_FP(ctx);
2000 return;
2001 }
2002 gen_optimize_fprf();
2003 bfa = 4 * (7 - crfS(ctx->opcode));
2004 gen_op_load_fpscr_T0(bfa);
2005 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
2006 gen_op_fpscr_resetbit(~(0xF << bfa));
2007 }
2008
2009 /* mffs */
2010 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2011 {
2012 if (unlikely(!ctx->fpu_enabled)) {
2013 GEN_EXCP_NO_FP(ctx);
2014 return;
2015 }
2016 gen_optimize_fprf();
2017 gen_reset_fpstatus();
2018 gen_op_load_fpscr_FT0();
2019 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
2020 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
2021 }
2022
2023 /* mtfsb0 */
2024 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2025 {
2026 uint8_t crb;
2027
2028 if (unlikely(!ctx->fpu_enabled)) {
2029 GEN_EXCP_NO_FP(ctx);
2030 return;
2031 }
2032 crb = 32 - (crbD(ctx->opcode) >> 2);
2033 gen_optimize_fprf();
2034 gen_reset_fpstatus();
2035 if (likely(crb != 30 && crb != 29))
2036 gen_op_fpscr_resetbit(~(1 << crb));
2037 if (unlikely(Rc(ctx->opcode) != 0)) {
2038 gen_op_load_fpcc();
2039 gen_op_set_Rc0();
2040 }
2041 }
2042
2043 /* mtfsb1 */
2044 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2045 {
2046 uint8_t crb;
2047
2048 if (unlikely(!ctx->fpu_enabled)) {
2049 GEN_EXCP_NO_FP(ctx);
2050 return;
2051 }
2052 crb = 32 - (crbD(ctx->opcode) >> 2);
2053 gen_optimize_fprf();
2054 gen_reset_fpstatus();
2055 /* XXX: we pretend we can only do IEEE floating-point computations */
2056 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI))
2057 gen_op_fpscr_setbit(crb);
2058 if (unlikely(Rc(ctx->opcode) != 0)) {
2059 gen_op_load_fpcc();
2060 gen_op_set_Rc0();
2061 }
2062 /* We can raise a deferred exception */
2063 gen_op_float_check_status();
2064 }
2065
2066 /* mtfsf */
2067 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2068 {
2069 if (unlikely(!ctx->fpu_enabled)) {
2070 GEN_EXCP_NO_FP(ctx);
2071 return;
2072 }
2073 gen_optimize_fprf();
2074 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
2075 gen_reset_fpstatus();
2076 gen_op_store_fpscr(FM(ctx->opcode));
2077 if (unlikely(Rc(ctx->opcode) != 0)) {
2078 gen_op_load_fpcc();
2079 gen_op_set_Rc0();
2080 }
2081 /* We can raise a deferred exception */
2082 gen_op_float_check_status();
2083 }
2084
2085 /* mtfsfi */
2086 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2087 {
2088 int bf, sh;
2089
2090 if (unlikely(!ctx->fpu_enabled)) {
2091 GEN_EXCP_NO_FP(ctx);
2092 return;
2093 }
2094 bf = crbD(ctx->opcode) >> 2;
2095 sh = 7 - bf;
2096 gen_optimize_fprf();
2097 tcg_gen_movi_i64(cpu_FT[0], FPIMM(ctx->opcode) << (4 * sh));
2098 gen_reset_fpstatus();
2099 gen_op_store_fpscr(1 << sh);
2100 if (unlikely(Rc(ctx->opcode) != 0)) {
2101 gen_op_load_fpcc();
2102 gen_op_set_Rc0();
2103 }
2104 /* We can raise a deferred exception */
2105 gen_op_float_check_status();
2106 }
2107
2108 /*** Addressing modes ***/
2109 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2110 static always_inline void gen_addr_imm_index (DisasContext *ctx,
2111 target_long maskl)
2112 {
2113 target_long simm = SIMM(ctx->opcode);
2114
2115 simm &= ~maskl;
2116 if (rA(ctx->opcode) == 0) {
2117 tcg_gen_movi_tl(cpu_T[0], simm);
2118 } else {
2119 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2120 if (likely(simm != 0))
2121 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
2122 }
2123 #ifdef DEBUG_MEMORY_ACCESSES
2124 gen_op_print_mem_EA();
2125 #endif
2126 }
2127
2128 static always_inline void gen_addr_reg_index (DisasContext *ctx)
2129 {
2130 if (rA(ctx->opcode) == 0) {
2131 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
2132 } else {
2133 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2134 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
2135 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2136 }
2137 #ifdef DEBUG_MEMORY_ACCESSES
2138 gen_op_print_mem_EA();
2139 #endif
2140 }
2141
2142 static always_inline void gen_addr_register (DisasContext *ctx)
2143 {
2144 if (rA(ctx->opcode) == 0) {
2145 tcg_gen_movi_tl(cpu_T[0], 0);
2146 } else {
2147 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2148 }
2149 #ifdef DEBUG_MEMORY_ACCESSES
2150 gen_op_print_mem_EA();
2151 #endif
2152 }
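/* Summary of the three effective-address helpers above: gen_addr_imm_index()
 * computes EA = (rA|0) + SIMM, gen_addr_reg_index() computes EA = (rA|0) + rB
 * and gen_addr_register() computes EA = (rA|0); all three leave the result in
 * cpu_T[0].  The maskl argument of gen_addr_imm_index() clears low-order bits
 * of the displacement; callers such as ld/std pass 0x03, presumably because
 * those DS-form encodings reuse the two low immediate bits to select the
 * instruction variant. */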
2153
2154 #if defined(TARGET_PPC64)
2155 #define _GEN_MEM_FUNCS(name, mode) \
2156 &gen_op_##name##_##mode, \
2157 &gen_op_##name##_le_##mode, \
2158 &gen_op_##name##_64_##mode, \
2159 &gen_op_##name##_le_64_##mode
2160 #else
2161 #define _GEN_MEM_FUNCS(name, mode) \
2162 &gen_op_##name##_##mode, \
2163 &gen_op_##name##_le_##mode
2164 #endif
2165 #if defined(CONFIG_USER_ONLY)
2166 #if defined(TARGET_PPC64)
2167 #define NB_MEM_FUNCS 4
2168 #else
2169 #define NB_MEM_FUNCS 2
2170 #endif
2171 #define GEN_MEM_FUNCS(name) \
2172 _GEN_MEM_FUNCS(name, raw)
2173 #else
2174 #if defined(TARGET_PPC64)
2175 #define NB_MEM_FUNCS 12
2176 #else
2177 #define NB_MEM_FUNCS 6
2178 #endif
2179 #define GEN_MEM_FUNCS(name) \
2180 _GEN_MEM_FUNCS(name, user), \
2181 _GEN_MEM_FUNCS(name, kernel), \
2182 _GEN_MEM_FUNCS(name, hypv)
2183 #endif
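/* The tables built with GEN_MEM_FUNCS above hold one dyngen op per memory
 * access flavour: user-mode-only builds get the raw and little-endian
 * variants (plus 64-bit ones on TARGET_PPC64), while softmmu builds repeat
 * that pattern for user, kernel and hypervisor contexts, for NB_MEM_FUNCS
 * entries in total.  ctx->mem_idx -- which presumably encodes the current
 * privilege level, endianness and 32/64-bit mode -- is used as a direct index
 * into these tables by the op_ldst()-style dispatch macros below. */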
2184
2185 /*** Integer load ***/
2186 #define op_ldst(name) (*gen_op_##name[ctx->mem_idx])()
2187 /* Byte access routines are endian-safe */
2188 #define gen_op_lbz_le_raw gen_op_lbz_raw
2189 #define gen_op_lbz_le_user gen_op_lbz_user
2190 #define gen_op_lbz_le_kernel gen_op_lbz_kernel
2191 #define gen_op_lbz_le_hypv gen_op_lbz_hypv
2192 #define gen_op_lbz_le_64_raw gen_op_lbz_64_raw
2193 #define gen_op_lbz_le_64_user gen_op_lbz_64_user
2194 #define gen_op_lbz_le_64_kernel gen_op_lbz_64_kernel
2195 #define gen_op_lbz_le_64_hypv gen_op_lbz_64_hypv
2196 #define gen_op_stb_le_raw gen_op_stb_raw
2197 #define gen_op_stb_le_user gen_op_stb_user
2198 #define gen_op_stb_le_kernel gen_op_stb_kernel
2199 #define gen_op_stb_le_hypv gen_op_stb_hypv
2200 #define gen_op_stb_le_64_raw gen_op_stb_64_raw
2201 #define gen_op_stb_le_64_user gen_op_stb_64_user
2202 #define gen_op_stb_le_64_kernel gen_op_stb_64_kernel
2203 #define gen_op_stb_le_64_hypv gen_op_stb_64_hypv
2204 #define OP_LD_TABLE(width) \
2205 static GenOpFunc *gen_op_l##width[NB_MEM_FUNCS] = { \
2206 GEN_MEM_FUNCS(l##width), \
2207 };
2208 #define OP_ST_TABLE(width) \
2209 static GenOpFunc *gen_op_st##width[NB_MEM_FUNCS] = { \
2210 GEN_MEM_FUNCS(st##width), \
2211 };
2212
2213 #define GEN_LD(width, opc, type) \
2214 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2215 { \
2216 gen_addr_imm_index(ctx, 0); \
2217 op_ldst(l##width); \
2218 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2219 }
2220
2221 #define GEN_LDU(width, opc, type) \
2222 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2223 { \
2224 if (unlikely(rA(ctx->opcode) == 0 || \
2225 rA(ctx->opcode) == rD(ctx->opcode))) { \
2226 GEN_EXCP_INVAL(ctx); \
2227 return; \
2228 } \
2229 if (type == PPC_64B) \
2230 gen_addr_imm_index(ctx, 0x03); \
2231 else \
2232 gen_addr_imm_index(ctx, 0); \
2233 op_ldst(l##width); \
2234 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2235 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2236 }
2237
2238 #define GEN_LDUX(width, opc2, opc3, type) \
2239 GEN_HANDLER(l##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2240 { \
2241 if (unlikely(rA(ctx->opcode) == 0 || \
2242 rA(ctx->opcode) == rD(ctx->opcode))) { \
2243 GEN_EXCP_INVAL(ctx); \
2244 return; \
2245 } \
2246 gen_addr_reg_index(ctx); \
2247 op_ldst(l##width); \
2248 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2249 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2250 }
2251
2252 #define GEN_LDX(width, opc2, opc3, type) \
2253 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2254 { \
2255 gen_addr_reg_index(ctx); \
2256 op_ldst(l##width); \
2257 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2258 }
2259
2260 #define GEN_LDS(width, op, type) \
2261 OP_LD_TABLE(width); \
2262 GEN_LD(width, op | 0x20, type); \
2263 GEN_LDU(width, op | 0x21, type); \
2264 GEN_LDUX(width, 0x17, op | 0x01, type); \
2265 GEN_LDX(width, 0x17, op | 0x00, type)
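/* GEN_LDS(width, op, type) thus instantiates the opcode table plus the four
 * standard load forms for one width: D-form, D-form with update, X-form with
 * update and plain X-form.  For example, GEN_LDS(bz, 0x02, PPC_INTEGER) just
 * below yields lbz (major opcode 0x02 | 0x20 = 0x22), lbzu (0x23), lbzux
 * (opc2 0x17, opc3 0x03) and lbzx (opc3 0x02). */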
2266
2267 /* lbz lbzu lbzux lbzx */
2268 GEN_LDS(bz, 0x02, PPC_INTEGER);
2269 /* lha lhau lhaux lhax */
2270 GEN_LDS(ha, 0x0A, PPC_INTEGER);
2271 /* lhz lhzu lhzux lhzx */
2272 GEN_LDS(hz, 0x08, PPC_INTEGER);
2273 /* lwz lwzu lwzux lwzx */
2274 GEN_LDS(wz, 0x00, PPC_INTEGER);
2275 #if defined(TARGET_PPC64)
2276 OP_LD_TABLE(wa);
2277 OP_LD_TABLE(d);
2278 /* lwaux */
2279 GEN_LDUX(wa, 0x15, 0x0B, PPC_64B);
2280 /* lwax */
2281 GEN_LDX(wa, 0x15, 0x0A, PPC_64B);
2282 /* ldux */
2283 GEN_LDUX(d, 0x15, 0x01, PPC_64B);
2284 /* ldx */
2285 GEN_LDX(d, 0x15, 0x00, PPC_64B);
2286 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2287 {
2288 if (Rc(ctx->opcode)) {
2289 if (unlikely(rA(ctx->opcode) == 0 ||
2290 rA(ctx->opcode) == rD(ctx->opcode))) {
2291 GEN_EXCP_INVAL(ctx);
2292 return;
2293 }
2294 }
2295 gen_addr_imm_index(ctx, 0x03);
2296 if (ctx->opcode & 0x02) {
2297 /* lwa (lwau is undefined) */
2298 op_ldst(lwa);
2299 } else {
2300 /* ld - ldu */
2301 op_ldst(ld);
2302 }
2303 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2304 if (Rc(ctx->opcode))
2305 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
2306 }
2307 /* lq */
2308 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2309 {
2310 #if defined(CONFIG_USER_ONLY)
2311 GEN_EXCP_PRIVOPC(ctx);
2312 #else
2313 int ra, rd;
2314
2315 /* Restore CPU state */
2316 if (unlikely(ctx->supervisor == 0)) {
2317 GEN_EXCP_PRIVOPC(ctx);
2318 return;
2319 }
2320 ra = rA(ctx->opcode);
2321 rd = rD(ctx->opcode);
2322 if (unlikely((rd & 1) || rd == ra)) {
2323 GEN_EXCP_INVAL(ctx);
2324 return;
2325 }
2326 if (unlikely(ctx->mem_idx & 1)) {
2327 /* Little-endian mode is not handled */
2328 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2329 return;
2330 }
2331 gen_addr_imm_index(ctx, 0x0F);
2332 op_ldst(ld);
2333 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[1]);
2334 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 8);
2335 op_ldst(ld);
2336 tcg_gen_mov_tl(cpu_gpr[rd + 1], cpu_T[1]);
2337 #endif
2338 }
2339 #endif
2340
2341 /*** Integer store ***/
2342 #define GEN_ST(width, opc, type) \
2343 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2344 { \
2345 gen_addr_imm_index(ctx, 0); \
2346 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2347 op_ldst(st##width); \
2348 }
2349
2350 #define GEN_STU(width, opc, type) \
2351 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2352 { \
2353 if (unlikely(rA(ctx->opcode) == 0)) { \
2354 GEN_EXCP_INVAL(ctx); \
2355 return; \
2356 } \
2357 if (type == PPC_64B) \
2358 gen_addr_imm_index(ctx, 0x03); \
2359 else \
2360 gen_addr_imm_index(ctx, 0); \
2361 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2362 op_ldst(st##width); \
2363 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2364 }
2365
2366 #define GEN_STUX(width, opc2, opc3, type) \
2367 GEN_HANDLER(st##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2368 { \
2369 if (unlikely(rA(ctx->opcode) == 0)) { \
2370 GEN_EXCP_INVAL(ctx); \
2371 return; \
2372 } \
2373 gen_addr_reg_index(ctx); \
2374 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2375 op_ldst(st##width); \
2376 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2377 }
2378
2379 #define GEN_STX(width, opc2, opc3, type) \
2380 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2381 { \
2382 gen_addr_reg_index(ctx); \
2383 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2384 op_ldst(st##width); \
2385 }
2386
2387 #define GEN_STS(width, op, type) \
2388 OP_ST_TABLE(width); \
2389 GEN_ST(width, op | 0x20, type); \
2390 GEN_STU(width, op | 0x21, type); \
2391 GEN_STUX(width, 0x17, op | 0x01, type); \
2392 GEN_STX(width, 0x17, op | 0x00, type)
2393
2394 /* stb stbu stbux stbx */
2395 GEN_STS(b, 0x06, PPC_INTEGER);
2396 /* sth sthu sthux sthx */
2397 GEN_STS(h, 0x0C, PPC_INTEGER);
2398 /* stw stwu stwux stwx */
2399 GEN_STS(w, 0x04, PPC_INTEGER);
2400 #if defined(TARGET_PPC64)
2401 OP_ST_TABLE(d);
2402 GEN_STUX(d, 0x15, 0x05, PPC_64B);
2403 GEN_STX(d, 0x15, 0x04, PPC_64B);
2404 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
2405 {
2406 int rs;
2407
2408 rs = rS(ctx->opcode);
2409 if ((ctx->opcode & 0x3) == 0x2) {
2410 #if defined(CONFIG_USER_ONLY)
2411 GEN_EXCP_PRIVOPC(ctx);
2412 #else
2413 /* stq */
2414 if (unlikely(ctx->supervisor == 0)) {
2415 GEN_EXCP_PRIVOPC(ctx);
2416 return;
2417 }
2418 if (unlikely(rs & 1)) {
2419 GEN_EXCP_INVAL(ctx);
2420 return;
2421 }
2422 if (unlikely(ctx->mem_idx & 1)) {
2423 /* Little-endian mode is not handled */
2424 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2425 return;
2426 }
2427 gen_addr_imm_index(ctx, 0x03);
2428 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs]);
2429 op_ldst(std);
2430 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 8);
2431 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs + 1]);
2432 op_ldst(std);
2433 #endif
2434 } else {
2435 /* std / stdu */
2436 if (Rc(ctx->opcode)) {
2437 if (unlikely(rA(ctx->opcode) == 0)) {
2438 GEN_EXCP_INVAL(ctx);
2439 return;
2440 }
2441 }
2442 gen_addr_imm_index(ctx, 0x03);
2443 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs]);
2444 op_ldst(std);
2445 if (Rc(ctx->opcode))
2446 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
2447 }
2448 }
2449 #endif
2450 /*** Integer load and store with byte reverse ***/
2451 /* lhbrx */
2452 OP_LD_TABLE(hbr);
2453 GEN_LDX(hbr, 0x16, 0x18, PPC_INTEGER);
2454 /* lwbrx */
2455 OP_LD_TABLE(wbr);
2456 GEN_LDX(wbr, 0x16, 0x10, PPC_INTEGER);
2457 /* sthbrx */
2458 OP_ST_TABLE(hbr);
2459 GEN_STX(hbr, 0x16, 0x1C, PPC_INTEGER);
2460 /* stwbrx */
2461 OP_ST_TABLE(wbr);
2462 GEN_STX(wbr, 0x16, 0x14, PPC_INTEGER);
2463
2464 /*** Integer load and store multiple ***/
2465 #define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
2466 static GenOpFunc1 *gen_op_lmw[NB_MEM_FUNCS] = {
2467 GEN_MEM_FUNCS(lmw),
2468 };
2469 static GenOpFunc1 *gen_op_stmw[NB_MEM_FUNCS] = {
2470 GEN_MEM_FUNCS(stmw),
2471 };
2472
2473 /* lmw */
2474 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2475 {
2476 /* NIP cannot be restored if the memory exception comes from a helper */
2477 gen_update_nip(ctx, ctx->nip - 4);
2478 gen_addr_imm_index(ctx, 0);
2479 op_ldstm(lmw, rD(ctx->opcode));
2480 }
2481
2482 /* stmw */
2483 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2484 {
2485 /* NIP cannot be restored if the memory exception comes from a helper */
2486 gen_update_nip(ctx, ctx->nip - 4);
2487 gen_addr_imm_index(ctx, 0);
2488 op_ldstm(stmw, rS(ctx->opcode));
2489 }
2490
2491 /*** Integer load and store strings ***/
2492 #define op_ldsts(name, start) (*gen_op_##name[ctx->mem_idx])(start)
2493 #define op_ldstsx(name, rd, ra, rb) (*gen_op_##name[ctx->mem_idx])(rd, ra, rb)
2494 /* String loads & stores are by definition endian-safe */
2495 #define gen_op_lswi_le_raw gen_op_lswi_raw
2496 #define gen_op_lswi_le_user gen_op_lswi_user
2497 #define gen_op_lswi_le_kernel gen_op_lswi_kernel
2498 #define gen_op_lswi_le_hypv gen_op_lswi_hypv
2499 #define gen_op_lswi_le_64_raw gen_op_lswi_raw
2500 #define gen_op_lswi_le_64_user gen_op_lswi_user
2501 #define gen_op_lswi_le_64_kernel gen_op_lswi_kernel
2502 #define gen_op_lswi_le_64_hypv gen_op_lswi_hypv
2503 static GenOpFunc1 *gen_op_lswi[NB_MEM_FUNCS] = {
2504 GEN_MEM_FUNCS(lswi),
2505 };
2506 #define gen_op_lswx_le_raw gen_op_lswx_raw
2507 #define gen_op_lswx_le_user gen_op_lswx_user
2508 #define gen_op_lswx_le_kernel gen_op_lswx_kernel
2509 #define gen_op_lswx_le_hypv gen_op_lswx_hypv
2510 #define gen_op_lswx_le_64_raw gen_op_lswx_raw
2511 #define gen_op_lswx_le_64_user gen_op_lswx_user
2512 #define gen_op_lswx_le_64_kernel gen_op_lswx_kernel
2513 #define gen_op_lswx_le_64_hypv gen_op_lswx_hypv
2514 static GenOpFunc3 *gen_op_lswx[NB_MEM_FUNCS] = {
2515 GEN_MEM_FUNCS(lswx),
2516 };
2517 #define gen_op_stsw_le_raw gen_op_stsw_raw
2518 #define gen_op_stsw_le_user gen_op_stsw_user
2519 #define gen_op_stsw_le_kernel gen_op_stsw_kernel
2520 #define gen_op_stsw_le_hypv gen_op_stsw_hypv
2521 #define gen_op_stsw_le_64_raw gen_op_stsw_raw
2522 #define gen_op_stsw_le_64_user gen_op_stsw_user
2523 #define gen_op_stsw_le_64_kernel gen_op_stsw_kernel
2524 #define gen_op_stsw_le_64_hypv gen_op_stsw_hypv
2525 static GenOpFunc1 *gen_op_stsw[NB_MEM_FUNCS] = {
2526 GEN_MEM_FUNCS(stsw),
2527 };
2528
2529 /* lswi */
2530 /* PowerPC32 specification says we must generate an exception if
2531 * rA is in the range of registers to be loaded.
2532 * On the other hand, IBM says this is valid, but rA won't be loaded.
2533 * For now, I'll follow the spec...
2534 */
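/* A concrete reading of the range check below, non-wrapping case: for lswi
 * with rD = 5 and NB = 12 the handler loads nr = 3 registers, r5..r7, and the
 * second clause of the test (start <= ra && start + nr > ra) raises the
 * invalid-operation program exception when rA is r5, r6 or r7.  The first
 * clause is meant to cover the case where the register range wraps past r31
 * back to r0. */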
2535 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
2536 {
2537 int nb = NB(ctx->opcode);
2538 int start = rD(ctx->opcode);
2539 int ra = rA(ctx->opcode);
2540 int nr;
2541
2542 if (nb == 0)
2543 nb = 32;
2544 nr = nb / 4;
2545 if (unlikely(((start + nr) > 32 &&
2546 start <= ra && (start + nr - 32) > ra) ||
2547 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2548 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
2549 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
2550 return;
2551 }
2552 /* NIP cannot be restored if the memory exception comes from a helper */
2553 gen_update_nip(ctx, ctx->nip - 4);
2554 gen_addr_register(ctx);
2555 tcg_gen_movi_tl(cpu_T[1], nb);
2556 op_ldsts(lswi, start);
2557 }
2558
2559 /* lswx */
2560 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
2561 {
2562 int ra = rA(ctx->opcode);
2563 int rb = rB(ctx->opcode);
2564
2565 /* NIP cannot be restored if the memory exception comes from a helper */
2566 gen_update_nip(ctx, ctx->nip - 4);
2567 gen_addr_reg_index(ctx);
2568 if (ra == 0) {
2569 ra = rb;
2570 }
2571 gen_op_load_xer_bc();
2572 op_ldstsx(lswx, rD(ctx->opcode), ra, rb);
2573 }
2574
2575 /* stswi */
2576 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
2577 {
2578 int nb = NB(ctx->opcode);
2579
2580 /* NIP cannot be restored if the memory exception comes from a helper */
2581 gen_update_nip(ctx, ctx->nip - 4);
2582 gen_addr_register(ctx);
2583 if (nb == 0)
2584 nb = 32;
2585 tcg_gen_movi_tl(cpu_T[1], nb);
2586 op_ldsts(stsw, rS(ctx->opcode));
2587 }
2588
2589 /* stswx */
2590 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
2591 {
2592 /* NIP cannot be restored if the memory exception comes from a helper */
2593 gen_update_nip(ctx, ctx->nip - 4);
2594 gen_addr_reg_index(ctx);
2595 gen_op_load_xer_bc();
2596 op_ldsts(stsw, rS(ctx->opcode));
2597 }
2598
2599 /*** Memory synchronisation ***/
2600 /* eieio */
2601 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
2602 {
2603 }
2604
2605 /* isync */
2606 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
2607 {
2608 GEN_STOP(ctx);
2609 }
2610
2611 #define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
2612 #define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
2613 static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = {
2614 GEN_MEM_FUNCS(lwarx),
2615 };
2616 static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = {
2617 GEN_MEM_FUNCS(stwcx),
2618 };
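/* lwarx and stwcx. form the load-reserve / store-conditional pair; the
 * reservation bookkeeping itself presumably lives in the gen_op_lwarx and
 * gen_op_stwcx helpers selected above, so the handlers below only compute the
 * effective address, update NIP so a memory exception raised inside the
 * helper reports the correct address, and move data between cpu_T[1] and the
 * GPR file. */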
2619
2620 /* lwarx */
2621 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
2622 {
2623 /* NIP cannot be restored if the memory exception comes from a helper */
2624 gen_update_nip(ctx, ctx->nip - 4);
2625 gen_addr_reg_index(ctx);
2626 op_lwarx();
2627 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2628 }
2629
2630 /* stwcx. */
2631 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
2632 {
2633 /* NIP cannot be restored if the memory exception comes from a helper */
2634 gen_update_nip(ctx, ctx->nip - 4);
2635 gen_addr_reg_index(ctx);
2636 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2637 op_stwcx();
2638 }
2639
2640 #if defined(TARGET_PPC64)
2641 #define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])()
2642 #define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])()
2643 static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = {
2644 GEN_MEM_FUNCS(ldarx),
2645 };
2646 static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = {
2647 GEN_MEM_FUNCS(stdcx),
2648 };
2649
2650 /* ldarx */
2651 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
2652 {
2653 /* NIP cannot be restored if the memory exception comes from a helper */
2654 gen_update_nip(ctx, ctx->nip - 4);
2655 gen_addr_reg_index(ctx);
2656 op_ldarx();
2657 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2658 }
2659
2660 /* stdcx. */
2661 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
2662 {
2663 /* NIP cannot be restored if the memory exception comes from a helper */
2664 gen_update_nip(ctx, ctx->nip - 4);
2665 gen_addr_reg_index(ctx);
2666 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2667 op_stdcx();
2668 }
2669 #endif /* defined(TARGET_PPC64) */
2670
2671 /* sync */
2672 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
2673 {
2674 }
2675
2676 /* wait */
2677 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
2678 {
2679 /* Stop translation, as the CPU is supposed to sleep from now on */
2680 gen_op_wait();
2681 GEN_EXCP(ctx, EXCP_HLT, 1);
2682 }
2683
2684 /*** Floating-point load ***/
2685 #define GEN_LDF(width, opc, type) \
2686 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2687 { \
2688 if (unlikely(!ctx->fpu_enabled)) { \
2689 GEN_EXCP_NO_FP(ctx); \
2690 return; \
2691 } \
2692 gen_addr_imm_index(ctx, 0); \
2693 op_ldst(l##width); \
2694 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2695 }
2696
2697 #define GEN_LDUF(width, opc, type) \
2698 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2699 { \
2700 if (unlikely(!ctx->fpu_enabled)) { \
2701 GEN_EXCP_NO_FP(ctx); \
2702 return; \
2703 } \
2704 if (unlikely(rA(ctx->opcode) == 0)) { \
2705 GEN_EXCP_INVAL(ctx); \
2706 return; \
2707 } \
2708 gen_addr_imm_index(ctx, 0); \
2709 op_ldst(l##width); \
2710 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2711 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2712 }
2713
2714 #define GEN_LDUXF(width, opc, type) \
2715 GEN_HANDLER(l##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
2716 { \
2717 if (unlikely(!ctx->fpu_enabled)) { \
2718 GEN_EXCP_NO_FP(ctx); \
2719 return; \
2720 } \
2721 if (unlikely(rA(ctx->opcode) == 0)) { \
2722 GEN_EXCP_INVAL(ctx); \
2723 return; \
2724 } \
2725 gen_addr_reg_index(ctx); \
2726 op_ldst(l##width); \
2727 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2728 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2729 }
2730
2731 #define GEN_LDXF(width, opc2, opc3, type) \
2732 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2733 { \
2734 if (unlikely(!ctx->fpu_enabled)) { \
2735 GEN_EXCP_NO_FP(ctx); \
2736 return; \
2737 } \
2738 gen_addr_reg_index(ctx); \
2739 op_ldst(l##width); \
2740 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2741 }
2742
2743 #define GEN_LDFS(width, op, type) \
2744 OP_LD_TABLE(width); \
2745 GEN_LDF(width, op | 0x20, type); \
2746 GEN_LDUF(width, op | 0x21, type); \
2747 GEN_LDUXF(width, op | 0x01, type); \
2748 GEN_LDXF(width, 0x17, op | 0x00, type)
2749
2750 /* lfd lfdu lfdux lfdx */
2751 GEN_LDFS(fd, 0x12, PPC_FLOAT);
2752 /* lfs lfsu lfsux lfsx */
2753 GEN_LDFS(fs, 0x10, PPC_FLOAT);
2754
2755 /*** Floating-point store ***/
2756 #define GEN_STF(width, opc, type) \
2757 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2758 { \
2759 if (unlikely(!ctx->fpu_enabled)) { \
2760 GEN_EXCP_NO_FP(ctx); \
2761 return; \
2762 } \
2763 gen_addr_imm_index(ctx, 0); \
2764 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2765 op_ldst(st##width); \
2766 }
2767
2768 #define GEN_STUF(width, opc, type) \
2769 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2770 { \
2771 if (unlikely(!ctx->fpu_enabled)) { \
2772 GEN_EXCP_NO_FP(ctx); \
2773 return; \
2774 } \
2775 if (unlikely(rA(ctx->opcode) == 0)) { \
2776 GEN_EXCP_INVAL(ctx); \
2777 return; \
2778 } \
2779 gen_addr_imm_index(ctx, 0); \
2780 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2781 op_ldst(st##width); \
2782 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2783 }
2784
2785 #define GEN_STUXF(width, opc, type) \
2786 GEN_HANDLER(st##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
2787 { \
2788 if (unlikely(!ctx->fpu_enabled)) { \
2789 GEN_EXCP_NO_FP(ctx); \
2790 return; \
2791 } \
2792 if (unlikely(rA(ctx->opcode) == 0)) { \
2793 GEN_EXCP_INVAL(ctx); \
2794 return; \
2795 } \
2796 gen_addr_reg_index(ctx); \
2797 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2798 op_ldst(st##width); \
2799 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2800 }
2801
2802 #define GEN_STXF(width, opc2, opc3, type) \
2803 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2804 { \
2805 if (unlikely(!ctx->fpu_enabled)) { \
2806 GEN_EXCP_NO_FP(ctx); \
2807 return; \
2808 } \
2809 gen_addr_reg_index(ctx); \
2810 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2811 op_ldst(st##width); \
2812 }
2813
2814 #define GEN_STFS(width, op, type) \
2815 OP_ST_TABLE(width); \
2816 GEN_STF(width, op | 0x20, type); \
2817 GEN_STUF(width, op | 0x21, type); \
2818 GEN_STUXF(width, op | 0x01, type); \
2819 GEN_STXF(width, 0x17, op | 0x00, type)
2820
2821 /* stfd stfdu stfdux stfdx */
2822 GEN_STFS(fd, 0x16, PPC_FLOAT);
2823 /* stfs stfsu stfsux stfsx */
2824 GEN_STFS(fs, 0x14, PPC_FLOAT);
2825
2826 /* Optional: */
2827 /* stfiwx */
2828 OP_ST_TABLE(fiw);
2829 GEN_STXF(fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
2830
2831 /*** Branch ***/
2832 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
2833 target_ulong dest)
2834 {
2835 TranslationBlock *tb;
2836 tb = ctx->tb;
2837 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
2838 likely(!ctx->singlestep_enabled)) {
2839 tcg_gen_goto_tb(n);
2840 tcg_gen_movi_tl(cpu_T[1], dest);
2841 #if defined(TARGET_PPC64)
2842 if (ctx->sf_mode)
2843 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
2844 else
2845 #endif
2846 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
2847 tcg_gen_exit_tb((long)tb + n);
2848 } else {
2849 tcg_gen_movi_tl(cpu_T[1], dest);
2850 #if defined(TARGET_PPC64)
2851 if (ctx->sf_mode)
2852 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
2853 else
2854 #endif
2855 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
2856 if (unlikely(ctx->singlestep_enabled)) {
2857 if ((ctx->singlestep_enabled &
2858 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
2859 ctx->exception == POWERPC_EXCP_BRANCH) {
2860 target_ulong tmp = ctx->nip;
2861 ctx->nip = dest;
2862 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
2863 ctx->nip = tmp;
2864 }
2865 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
2866 gen_update_nip(ctx, dest);
2867 gen_op_debug();
2868 }
2869 }
2870 tcg_gen_exit_tb(0);
2871 }
2872 }
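/* gen_goto_tb() above follows the usual TCG block-chaining pattern: when the
 * destination lies on the same page as the current TB and single-stepping is
 * off, it emits tcg_gen_goto_tb()/tcg_gen_exit_tb((long)tb + n) so the
 * generated code can later be patched to jump straight to the next TB;
 * otherwise it stores the target (masked to a 4-byte boundary, and truncated
 * to 32 bits outside sf_mode) into cpu_nip and exits with tcg_gen_exit_tb(0),
 * raising TRACE or debug exceptions first when single-step flags are set. */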
2873
2874 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
2875 {
2876 #if defined(TARGET_PPC64)
2877 if (ctx->sf_mode != 0 && (nip >> 32))
2878 gen_op_setlr_64(ctx->nip >> 32, ctx->nip);
2879 else
2880 #endif
2881 gen_op_setlr(ctx->nip);
2882 }
2883
2884 /* b ba bl bla */
2885 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
2886 {
2887 target_ulong li, target;
2888
2889 ctx->exception = POWERPC_EXCP_BRANCH;
2890 /* sign extend LI */
2891 #if defined(TARGET_PPC64)
2892 if (ctx->sf_mode)
2893 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
2894 else
2895 #endif
2896 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
2897 if (likely(AA(ctx->opcode) == 0))
2898 target = ctx->nip + li - 4;
2899 else
2900 target = li;
2901 #if defined(TARGET_PPC64)
2902 if (!ctx->sf_mode)
2903 target = (uint32_t)target;
2904 #endif
2905 if (LK(ctx->opcode))
2906 gen_setlr(ctx, ctx->nip);
2907 gen_goto_tb(ctx, 0, target);
2908 }
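/* In the unconditional-branch handler above, LI() presumably extracts the
 * 26-bit byte displacement (instruction bits 6..29 with two low zero bits);
 * the "<< 6 >> 6" and "<< 38 >> 38" shift pairs sign-extend it to 32 or 64
 * bits.  The relative target is computed as ctx->nip + li - 4, apparently
 * because ctx->nip has already been advanced past the branch instruction. */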
2909
2910 #define BCOND_IM 0
2911 #define BCOND_LR 1
2912 #define BCOND_CTR 2
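/* gen_bcond() below covers the three conditional-branch flavours: BCOND_IM
 * (bc, immediate displacement), BCOND_LR (bclr, target in LR) and BCOND_CTR
 * (bcctr, target in CTR).  Its decoding of the BO field mirrors the
 * architecture: a clear 0x04 bit decrements CTR, a set 0x10 bit skips the CR
 * test entirely, the 0x08 bit selects branch-on-true versus branch-on-false,
 * and the bo & 0x6 switches distinguish the CTR != 0 and CTR == 0 variants
 * (the gen_op_test_ctr* helpers presumably implement those tests). */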
2913
2914 static always_inline void gen_bcond (DisasContext *ctx, int type)
2915 {
2916 target_ulong target = 0;
2917 target_ulong li;
2918 uint32_t bo = BO(ctx->opcode);
2919 uint32_t bi = BI(ctx->opcode);
2920 uint32_t mask;
2921
2922 ctx->exception = POWERPC_EXCP_BRANCH;
2923 if ((bo & 0x4) == 0)
2924 gen_op_dec_ctr();
2925 switch(type) {
2926 case BCOND_IM:
2927 li = (target_long)((int16_t)(BD(ctx->opcode)));
2928 if (likely(AA(ctx->opcode) == 0)) {
2929 target = ctx->nip + li - 4;
2930 } else {
2931 target = li;
2932 }
2933 #if defined(TARGET_PPC64)
2934 if (!ctx->sf_mode)
2935 target = (uint32_t)target;
2936 #endif
2937 break;
2938 case BCOND_CTR:
2939 gen_op_movl_T1_ctr();
2940 break;
2941 default:
2942 case BCOND_LR:
2943 gen_op_movl_T1_lr();
2944 break;
2945 }
2946 if (LK(ctx->opcode))
2947 gen_setlr(ctx, ctx->nip);
2948 if (bo & 0x10) {
2949 /* No CR condition */
2950 switch (bo & 0x6) {
2951 case 0:
2952 #if defined(TARGET_PPC64)
2953 if (ctx->sf_mode)
2954 gen_op_test_ctr_64();
2955 else
2956 #endif
2957 gen_op_test_ctr();
2958 break;
2959 case 2:
2960 #if defined(TARGET_PPC64)
2961 if (ctx->sf_mode)
2962 gen_op_test_ctrz_64();
2963 else
2964 #endif
2965 gen_op_test_ctrz();
2966 break;
2967 default:
2968 case 4:
2969 case 6:
2970 if (type == BCOND_IM) {
2971 gen_goto_tb(ctx, 0, target);
2972 return;
2973 } else {
2974 #if defined(TARGET_PPC64)
2975 if (ctx->sf_mode)
2976 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
2977 else
2978 #endif
2979 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
2980 goto no_test;
2981 }
2982 break;
2983 }
2984 } else {
2985 mask = 1 << (3 - (bi & 0x03));
2986 tcg_gen_mov_i32(cpu_T[0], cpu_crf[bi >> 2]);
2987 if (bo & 0x8) {
2988 switch (bo & 0x6) {
2989 case 0:
2990 #if defined(TARGET_PPC64)
2991 if (ctx->sf_mode)
2992 gen_op_test_ctr_true_64(mask);
2993 else
2994 #endif
2995 gen_op_test_ctr_true(mask);
2996 break;
2997 case 2:
2998 #if defined(TARGET_PPC64)
2999 if (ctx->sf_mode)
3000 gen_op_test_ctrz_true_64(mask);
3001 else
3002 #endif
3003 gen_op_test_ctrz_true(mask);
3004 break;
3005 default:
3006 case 4:
3007 case 6:
3008 gen_op_test_true(mask);
3009 break;
3010 }
3011 } else {
3012 switch (bo & 0x6) {
3013 case 0:
3014 #if defined(TARGET_PPC64)
3015 if (ctx->sf_mode)
3016 gen_op_test_ctr_false_64(mask);
3017 else
3018 #endif
3019 gen_op_test_ctr_false(mask);
3020 break;
3021 case 2:
3022 #if defined(TARGET_PPC64)
3023 if (ctx->sf_mode)
3024 gen_op_test_ctrz_false_64(mask);
3025 else
3026 #endif
3027 gen_op_test_ctrz_false(mask);
3028 break;
3029 default:
3030 case 4:
3031 case 6:
3032 gen_op_test_false(mask);
3033 break;
3034 }
3035 }
3036 }
3037 if (type == BCOND_IM) {
3038 int l1 = gen_new_label();
3039 gen_op_jz_T0(l1);
3040 gen_goto_tb(ctx, 0, target);
3041 gen_set_label(l1);
3042 gen_goto_tb(ctx, 1, ctx->nip);
3043 } else {
3044 #if defined(TARGET_PPC64)
3045 if (ctx->sf_mode)
3046 gen_op_btest_T1_64(ctx->nip >> 32, ctx->nip);
3047 else
3048 #endif
3049 gen_op_btest_T1(ctx->nip);
3050 no_test:
3051 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3052 gen_update_nip(ctx, ctx->nip);
3053 gen_op_debug();
3054 }
3055 tcg_gen_exit_tb(0);
3056 }
3057 }
3058
3059 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3060 {
3061 gen_bcond(ctx, BCOND_IM);
3062 }
3063
3064 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3065 {
3066 gen_bcond(ctx, BCOND_CTR);
3067 }
3068
3069 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3070 {
3071 gen_bcond(ctx, BCOND_LR);
3072 }
3073
3074 /*** Condition register logical ***/
3075 #define GEN_CRLOGIC(op, opc) \
3076 GEN_HANDLER(cr##op, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3077 { \
3078 uint8_t bitmask; \
3079 int sh; \
3080 tcg_gen_mov_i32(cpu_T[0], cpu_crf[crbA(ctx->opcode) >> 2]); \
3081 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3082 if (sh > 0) \
3083 gen_op_srli_T0(sh); \
3084 else if (sh < 0) \
3085 gen_op_sli_T0(-sh); \
3086 tcg_gen_mov_i32(cpu_T[1], cpu_crf[crbB(ctx->opcode) >> 2]); \
3087 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3088 if (sh > 0) \
3089 gen_op_srli_T1(sh); \
3090 else if (sh < 0) \
3091 gen_op_sli_T1(-sh); \
3092 gen_op_##op(); \
3093 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3094 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], bitmask); \
3095 tcg_gen_andi_i32(cpu_T[1], cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3096 gen_op_or(); \
3097 tcg_gen_andi_i32(cpu_crf[crbD(ctx->opcode) >> 2], cpu_T[0], 0xf); \
3098 }
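/* The CR logical macro above works on whole 4-bit CR fields, since the CR is
 * kept as eight separate cpu_crf globals: it copies the fields holding crbA
 * and crbB, shifts each one so the source bit lines up with the crbD bit
 * position (right when the destination bit index is larger, left otherwise),
 * applies the boolean op, then masks the single result bit and merges it back
 * into the destination field, preserving that field's other three bits. */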
3099
3100 /* crand */
3101 GEN_CRLOGIC(and, 0x08);
3102 /* crandc */
3103 GEN_CRLOGIC(andc, 0x04);
3104 /* creqv */
3105 GEN_CRLOGIC(eqv, 0x09);
3106 /* crnand */
3107 GEN_CRLOGIC(nand, 0x07);
3108 /* crnor */
3109 GEN_CRLOGIC(nor, 0x01);
3110 /* cror */
3111 GEN_CRLOGIC(or, 0x0E);
3112 /* crorc */
3113 GEN_CRLOGIC(orc, 0x0D);
3114 /* crxor */
3115 GEN_CRLOGIC(xor, 0x06);
3116 /* mcrf */
3117 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3118 {
3119 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3120 }
3121
3122 /*** System linkage ***/
3123 /* rfi (supervisor only) */
3124 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3125 {
3126 #if defined(CONFIG_USER_ONLY)
3127 GEN_EXCP_PRIVOPC(ctx);
3128 #else
3129 /* Restore CPU state */
3130 if (unlikely(!ctx->supervisor)) {
3131 GEN_EXCP_PRIVOPC(ctx);
3132 return;
3133 }
3134 gen_op_rfi();
3135 GEN_SYNC(ctx);
3136 #endif
3137 }
3138
3139 #if defined(TARGET_PPC64)
3140 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3141 {
3142 #if defined(CONFIG_USER_ONLY)
3143 GEN_EXCP_PRIVOPC(ctx);
3144 #else
3145 /* Restore CPU state */
3146 if (unlikely(!ctx->supervisor)) {
3147 GEN_EXCP_PRIVOPC(ctx);
3148 return;
3149 }
3150 gen_op_rfid();
3151 GEN_SYNC(ctx);
3152 #endif
3153 }
3154
3155 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3156 {
3157 #if defined(CONFIG_USER_ONLY)
3158 GEN_EXCP_PRIVOPC(ctx);
3159 #else
3160 /* Restore CPU state */
3161 if (unlikely(ctx->supervisor <= 1)) {
3162 GEN_EXCP_PRIVOPC(ctx);
3163 return;
3164 }
3165 gen_op_hrfid();
3166 GEN_SYNC(ctx);
3167 #endif
3168 }
3169 #endif
3170
3171 /* sc */
3172 #if defined(CONFIG_USER_ONLY)
3173 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3174 #else
3175 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3176 #endif
3177 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3178 {
3179 uint32_t lev;
3180
3181 lev = (ctx->opcode >> 5) & 0x7F;
3182 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
3183 }
3184
3185 /*** Trap ***/
3186 /* tw */
3187 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3188 {
3189 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3190 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3191 /* Update the nip since this might generate a trap exception */
3192 gen_update_nip(ctx, ctx->nip);
3193 gen_op_tw(TO(ctx->opcode));
3194 }
3195
3196 /* twi */
3197 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3198 {
3199 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3200 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3201 /* Update the nip since this might generate a trap exception */
3202 gen_update_nip(ctx, ctx->nip);
3203 gen_op_tw(TO(ctx->opcode));
3204 }
3205
3206 #if defined(TARGET_PPC64)
3207 /* td */
3208 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3209 {
3210 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3211 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3212 /* Update the nip since this might generate a trap exception */
3213 gen_update_nip(ctx, ctx->nip);
3214 gen_op_td(TO(ctx->opcode));
3215 }
3216
3217 /* tdi */
3218 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3219 {
3220 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3221 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3222 /* Update the nip since this might generate a trap exception */
3223 gen_update_nip(ctx, ctx->nip);
3224 gen_op_td(TO(ctx->opcode));
3225 }
3226 #endif
3227
3228 /*** Processor control ***/
3229 /* mcrxr */
3230 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3231 {
3232 gen_op_load_xer_cr();
3233 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
3234 gen_op_clear_xer_ov();
3235 gen_op_clear_xer_ca();
3236 }
3237
3238 /* mfcr */
3239 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3240 {
3241 uint32_t crm, crn;
3242
3243 if (likely(ctx->opcode & 0x00100000)) {
3244 crm = CRM(ctx->opcode);
3245 if (likely((crm ^ (crm - 1)) == 0)) {
3246 crn = ffs(crm);
3247 tcg_gen_mov_i32(cpu_T[0], cpu_crf[7 - crn]);
3248 }
3249 } else {
3250 gen_op_load_cr();
3251 }
3252 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3253 }
3254
3255 /* mfmsr */
3256 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3257 {
3258 #if defined(CONFIG_USER_ONLY)
3259 GEN_EXCP_PRIVREG(ctx);
3260 #else
3261 if (unlikely(!ctx->supervisor)) {
3262 GEN_EXCP_PRIVREG(ctx);
3263 return;
3264 }
3265 gen_op_load_msr();
3266 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3267 #endif
3268 }
3269
3270 #if 1
3271 #define SPR_NOACCESS ((void *)(-1UL))
3272 #else
3273 static void spr_noaccess (void *opaque, int sprn)
3274 {
3275 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3276 printf("ERROR: try to access SPR %d !\n", sprn);
3277 }
3278 #define SPR_NOACCESS (&spr_noaccess)
3279 #endif
3280
3281 /* mfspr */
3282 static always_inline void gen_op_mfspr (DisasContext *ctx)
3283 {
3284 void (*read_cb)(void *opaque, int sprn);
3285 uint32_t sprn = SPR(ctx->opcode);
3286
3287 #if !defined(CONFIG_USER_ONLY)
3288 if (ctx->supervisor == 2)
3289 read_cb = ctx->spr_cb[sprn].hea_read;
3290 else if (ctx->supervisor)
3291 read_cb = ctx->spr_cb[sprn].oea_read;
3292 else
3293 #endif
3294 read_cb = ctx->spr_cb[sprn].uea_read;
3295 if (likely(read_cb != NULL)) {
3296 if (likely(read_cb != SPR_NOACCESS)) {
3297 (*read_cb)(ctx, sprn);
3298 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3299 } else {
3300 /* Privilege exception */
3301 /* This is a hack to avoid warnings when running Linux:
3302 * this OS breaks the PowerPC virtualisation model,
3303 * allowing userland applications to read the PVR
3304 */
3305 if (sprn != SPR_PVR) {
3306 if (loglevel != 0) {
3307 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3308 ADDRX "\n", sprn, sprn, ctx->nip);
3309 }
3310 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3311 sprn, sprn, ctx->nip);
3312 }
3313 GEN_EXCP_PRIVREG(ctx);
3314 }
3315 } else {
3316 /* Not defined */
3317 if (loglevel != 0) {
3318 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3319 ADDRX "\n", sprn, sprn, ctx->nip);
3320 }
3321 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3322 sprn, sprn, ctx->nip);
3323 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3324 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3325 }
3326 }
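/* SPR reads are table-driven: depending on the privilege level the handler
 * above picks the hea_read (hypervisor), oea_read (supervisor) or uea_read
 * (user) callback registered for the SPR.  A NULL callback means the SPR does
 * not exist and yields an invalid-SPR program exception; the SPR_NOACCESS
 * sentinel marks an SPR that exists but may not be accessed at this privilege
 * level and yields a privilege exception -- only the warning message is
 * suppressed for the PVR, since Linux userland reads it. */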
3327
3328 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3329 {
3330 gen_op_mfspr(ctx);
3331 }
3332
3333 /* mftb */
3334 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
3335 {
3336 gen_op_mfspr(ctx);
3337 }
3338
3339 /* mtcrf */
3340 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3341 {
3342 uint32_t crm, crn;
3343
3344 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3345 crm = CRM(ctx->opcode);
3346 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3347 crn = ffs(crm);
3348 gen_op_srli_T0(crn * 4);
3349 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_T[0], 0xf);
3350 } else {
3351 gen_op_store_cr(crm);
3352 }
3353 }
3354
3355 /* mtmsr */
3356 #if defined(TARGET_PPC64)
3357 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3358 {
3359 #if defined(CONFIG_USER_ONLY)
3360 GEN_EXCP_PRIVREG(ctx);
3361 #else
3362 if (unlikely(!ctx->supervisor)) {
3363 GEN_EXCP_PRIVREG(ctx);
3364 return;
3365 }
3366 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3367 if (ctx->opcode & 0x00010000) {
3368 /* Special form that does not need any synchronisation */
3369 gen_op_update_riee();
3370 } else {
3371 /* XXX: we need to update nip before the store:
3372 * if we enter power saving mode, we will exit the loop
3373 * directly from ppc_store_msr
3374 */
3375 gen_update_nip(ctx, ctx->nip);
3376 gen_op_store_msr();
3377 /* Must stop the translation as machine state (may have) changed */
3378 /* Note that mtmsrd is not always defined as context-synchronizing */
3379 ctx->exception = POWERPC_EXCP_STOP;
3380 }
3381 #endif
3382 }
3383 #endif
3384
3385 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
3386 {
3387 #if defined(CONFIG_USER_ONLY)
3388 GEN_EXCP_PRIVREG(ctx);
3389 #else
3390 if (unlikely(!ctx->supervisor)) {
3391 GEN_EXCP_PRIVREG(ctx);
3392 return;
3393 }
3394 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3395 if (ctx->opcode & 0x00010000) {
3396 /* Special form that does not need any synchronisation */
3397 gen_op_update_riee();
3398 } else {
3399 /* XXX: we need to update nip before the store:
3400 * if we enter power saving mode, we will exit the loop
3401 * directly from ppc_store_msr
3402 */
3403 gen_update_nip(ctx, ctx->nip);
3404 #if defined(TARGET_PPC64)
3405 if (!ctx->sf_mode)
3406 gen_op_store_msr_32();
3407 else
3408 #endif
3409 gen_op_store_msr();
3410 /* Must stop the translation as machine state (may have) changed */
3411 /* Note that mtmsr is not always defined as context-synchronizing */
3412 ctx->exception = POWERPC_EXCP_STOP;
3413 }
3414 #endif
3415 }
3416
3417 /* mtspr */
3418 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
3419 {
3420 void (*write_cb)(void *opaque, int sprn);
3421 uint32_t sprn = SPR(ctx->opcode);
3422
3423 #if !defined(CONFIG_USER_ONLY)
3424 if (ctx->supervisor == 2)
3425 write_cb = ctx->spr_cb[sprn].hea_write;
3426 else if (ctx->supervisor)
3427 write_cb = ctx->spr_cb[sprn].oea_write;
3428 else
3429 #endif
3430 write_cb = ctx->spr_cb[sprn].uea_write;
3431 if (likely(write_cb != NULL)) {
3432 if (likely(write_cb != SPR_NOACCESS)) {
3433 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3434 (*write_cb)(ctx, sprn);
3435 } else {
3436 /* Privilege exception */
3437 if (loglevel != 0) {
3438 fprintf(logfile, "Trying to write privileged spr %d %03x at "
3439 ADDRX "\n", sprn, sprn, ctx->nip);
3440 }
3441 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
3442 sprn, sprn, ctx->nip);
3443 GEN_EXCP_PRIVREG(ctx);
3444 }
3445 } else {
3446 /* Not defined */
3447 if (loglevel != 0) {
3448 fprintf(logfile, "Trying to write invalid spr %d %03x at "
3449 ADDRX "\n", sprn, sprn, ctx->nip);
3450 }
3451 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
3452 sprn, sprn, ctx->nip);
3453 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3454 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3455 }
3456 }
3457
3458 /*** Cache management ***/
3459 /* dcbf */
3460 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
3461 {
3462 /* XXX: specification says this is treated as a load by the MMU */
3463 gen_addr_reg_index(ctx);
3464 op_ldst(lbz);
3465 }
3466
3467 /* dcbi (Supervisor only) */
3468 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
3469 {
3470 #if defined(CONFIG_USER_ONLY)
3471 GEN_EXCP_PRIVOPC(ctx);
3472 #else
3473 if (unlikely(!ctx->supervisor)) {
3474 GEN_EXCP_PRIVOPC(ctx);
3475 return;
3476 }
3477 gen_addr_reg_index(ctx);
3478 /* XXX: specification says this should be treated as a store by the MMU */
3479 op_ldst(lbz);
3480 op_ldst(stb);
3481 #endif
3482 }
3483
3484 /* dcbst */
3485 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
3486 {
3487 /* XXX: specification says this is treated as a load by the MMU */
3488 gen_addr_reg_index(ctx);
3489 op_ldst(lbz);
3490 }
3491
3492 /* dcbt */
3493 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
3494 {
3495 /* interpreted as no-op */
3496 /* XXX: specification says this is treated as a load by the MMU
3497 * but does not generate any exception
3498 */
3499 }
3500
3501 /* dcbtst */
3502 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
3503 {
3504 /* interpreted as no-op */
3505 /* XXX: specification says this is treated as a load by the MMU
3506 * but does not generate any exception
3507 */
3508 }
3509
3510 /* dcbz */
3511 #define op_dcbz(n) (*gen_op_dcbz[n][ctx->mem_idx])()
3512 static GenOpFunc *gen_op_dcbz[4][NB_MEM_FUNCS] = {
3513 /* 32 bytes cache line size */
3514 {
3515 #define gen_op_dcbz_l32_le_raw gen_op_dcbz_l32_raw
3516 #define gen_op_dcbz_l32_le_user gen_op_dcbz_l32_user
3517 #define gen_op_dcbz_l32_le_kernel gen_op_dcbz_l32_kernel
3518 #define gen_op_dcbz_l32_le_hypv gen_op_dcbz_l32_hypv
3519 #define gen_op_dcbz_l32_le_64_raw gen_op_dcbz_l32_64_raw
3520 #define gen_op_dcbz_l32_le_64_user gen_op_dcbz_l32_64_user
3521 #define gen_op_dcbz_l32_le_64_kernel gen_op_dcbz_l32_64_kernel
3522 #define gen_op_dcbz_l32_le_64_hypv gen_op_dcbz_l32_64_hypv
3523 GEN_MEM_FUNCS(dcbz_l32),
3524 },
3525 /* 64 bytes cache line size */
3526 {
3527 #define gen_op_dcbz_l64_le_raw gen_op_dcbz_l64_raw
3528 #define gen_op_dcbz_l64_le_user gen_op_dcbz_l64_user
3529 #define gen_op_dcbz_l64_le_kernel gen_op_dcbz_l64_kernel
3530 #define gen_op_dcbz_l64_le_hypv gen_op_dcbz_l64_hypv
3531 #define gen_op_dcbz_l64_le_64_raw gen_op_dcbz_l64_64_raw
3532 #define gen_op_dcbz_l64_le_64_user gen_op_dcbz_l64_64_user
3533 #define gen_op_dcbz_l64_le_64_kernel gen_op_dcbz_l64_64_kernel
3534 #define gen_op_dcbz_l64_le_64_hypv gen_op_dcbz_l64_64_hypv
3535 GEN_MEM_FUNCS(dcbz_l64),
3536 },
3537 /* 128 bytes cache line size */
3538 {
3539 #define gen_op_dcbz_l128_le_raw gen_op_dcbz_l128_raw
3540 #define gen_op_dcbz_l128_le_user gen_op_dcbz_l128_user
3541 #define gen_op_dcbz_l128_le_kernel gen_op_dcbz_l128_kernel
3542 #define gen_op_dcbz_l128_le_hypv gen_op_dcbz_l128_hypv
3543 #define gen_op_dcbz_l128_le_64_raw gen_op_dcbz_l128_64_raw
3544 #define gen_op_dcbz_l128_le_64_user gen_op_dcbz_l128_64_user
3545 #define gen_op_dcbz_l128_le_64_kernel gen_op_dcbz_l128_64_kernel
3546 #define gen_op_dcbz_l128_le_64_hypv gen_op_dcbz_l128_64_hypv
3547 GEN_MEM_FUNCS(dcbz_l128),
3548 },
3549 /* tunable cache line size */
3550 {
3551 #define gen_op_dcbz_le_raw gen_op_dcbz_raw
3552 #define gen_op_dcbz_le_user gen_op_dcbz_user
3553 #define gen_op_dcbz_le_kernel gen_op_dcbz_kernel
3554 #define gen_op_dcbz_le_hypv gen_op_dcbz_hypv
3555 #define gen_op_dcbz_le_64_raw gen_op_dcbz_64_raw
3556 #define gen_op_dcbz_le_64_user gen_op_dcbz_64_user
3557 #define gen_op_dcbz_le_64_kernel gen_op_dcbz_64_kernel
3558 #define gen_op_dcbz_le_64_hypv gen_op_dcbz_64_hypv
3559 GEN_MEM_FUNCS(dcbz),
3560 },
3561 };
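/* The four sub-tables above correspond to fixed 32-, 64- and 128-byte cache
 * line sizes plus a "tunable" fallback; handler_dcbz() below maps
 * ctx->dcache_line_size onto the matching index and falls back to index 3 for
 * any other value, including the -1 passed by the 970-specific dcbz handler
 * when its variant bit is clear. */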
3562
3563 static always_inline void handler_dcbz (DisasContext *ctx,
3564 int dcache_line_size)
3565 {
3566 int n;
3567
3568 switch (dcache_line_size) {
3569 case 32:
3570 n = 0;
3571 break;
3572 case 64:
3573 n = 1;
3574 break;
3575 case 128:
3576 n = 2;
3577 break;
3578 default:
3579 n = 3;
3580 break;
3581 }
3582 op_dcbz(n);
3583 }
3584
3585 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
3586 {
3587 gen_addr_reg_index(ctx);
3588 handler_dcbz(ctx, ctx->dcache_line_size);
3589 gen_op_check_reservation();
3590 }
3591
3592 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
3593 {
3594 gen_addr_reg_index(ctx);
3595 if (ctx->opcode & 0x00200000)
3596 handler_dcbz(ctx, ctx->dcache_line_size);
3597 else
3598 handler_dcbz(ctx, -1);
3599 gen_op_check_reservation();
3600 }
3601
3602 /* icbi */
3603 #define op_icbi() (*gen_op_icbi[ctx->mem_idx])()
3604 #define gen_op_icbi_le_raw gen_op_icbi_raw
3605 #define gen_op_icbi_le_user gen_op_icbi_user
3606 #define gen_op_icbi_le_kernel gen_op_icbi_kernel
3607 #define gen_op_icbi_le_hypv gen_op_icbi_hypv
3608 #define gen_op_icbi_le_64_raw gen_op_icbi_64_raw
3609 #define gen_op_icbi_le_64_user gen_op_icbi_64_user
3610 #define gen_op_icbi_le_64_kernel gen_op_icbi_64_kernel
3611 #define gen_op_icbi_le_64_hypv gen_op_icbi_64_hypv
3612 static GenOpFunc *gen_op_icbi[NB_MEM_FUNCS] = {
3613 GEN_MEM_FUNCS(icbi),
3614 };
3615
3616 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
3617 {
3618 /* NIP cannot be restored if the memory exception comes from a helper */
3619 gen_update_nip(ctx, ctx->nip - 4);
3620 gen_addr_reg_index(ctx);
3621 op_icbi();
3622 }
3623
3624 /* Optional: */
3625 /* dcba */
3626 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
3627 {
3628 /* interpreted as no-op */
3629 /* XXX: specification says this is treated as a store by the MMU
3630 * but does not generate any exception
3631 */
3632 }
3633
3634 /*** Segment register manipulation ***/
3635 /* Supervisor only: */
3636 /* mfsr */
3637 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
3638 {
3639 #if defined(CONFIG_USER_ONLY)
3640 GEN_EXCP_PRIVREG(ctx);
3641 #else
3642 if (unlikely(!ctx->supervisor)) {
3643 GEN_EXCP_PRIVREG(ctx);
3644 return;
3645 }
3646 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3647 gen_op_load_sr();
3648 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3649 #endif
3650 }
3651
3652 /* mfsrin */
3653 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
3654 {
3655 #if defined(CONFIG_USER_ONLY)
3656 GEN_EXCP_PRIVREG(ctx);
3657 #else
3658 if (unlikely(!ctx->supervisor)) {
3659 GEN_EXCP_PRIVREG(ctx);
3660 return;
3661 }
3662 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3663 gen_op_srli_T1(28);
3664 gen_op_load_sr();
3665 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3666 #endif
3667 }
3668
3669 /* mtsr */
3670 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
3671 {
3672 #if defined(CONFIG_USER_ONLY)
3673 GEN_EXCP_PRIVREG(ctx);
3674 #else
3675 if (unlikely(!ctx->supervisor)) {
3676 GEN_EXCP_PRIVREG(ctx);
3677 return;
3678 }
3679 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3680 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3681 gen_op_store_sr();
3682 #endif
3683 }
3684
3685 /* mtsrin */
3686 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
3687 {
3688 #if defined(CONFIG_USER_ONLY)
3689 GEN_EXCP_PRIVREG(ctx);
3690 #else
3691 if (unlikely(!ctx->supervisor)) {
3692 GEN_EXCP_PRIVREG(ctx);
3693 return;
3694 }
3695 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3696 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3697 gen_op_srli_T1(28);
3698 gen_op_store_sr();
3699 #endif
3700 }
3701
3702 #if defined(TARGET_PPC64)
3703 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
3704 /* mfsr */
3705 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
3706 {
3707 #if defined(CONFIG_USER_ONLY)
3708 GEN_EXCP_PRIVREG(ctx);
3709 #else
3710 if (unlikely(!ctx->supervisor)) {
3711 GEN_EXCP_PRIVREG(ctx);
3712 return;
3713 }
3714 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3715 gen_op_load_slb();
3716 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3717 #endif
3718 }
3719
3720 /* mfsrin */
3721 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
3722 PPC_SEGMENT_64B)
3723 {
3724 #if defined(CONFIG_USER_ONLY)
3725 GEN_EXCP_PRIVREG(ctx);
3726 #else
3727 if (unlikely(!ctx->supervisor)) {
3728 GEN_EXCP_PRIVREG(ctx);
3729 return;
3730 }
3731 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3732 gen_op_srli_T1(28);
3733 gen_op_load_slb();
3734 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3735 #endif
3736 }
3737
3738 /* mtsr */
3739 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
3740 {
3741 #if defined(CONFIG_USER_ONLY)
3742 GEN_EXCP_PRIVREG(ctx);
3743 #else
3744 if (unlikely(!ctx->supervisor)) {
3745 GEN_EXCP_PRIVREG(ctx);
3746 return;
3747 }
3748 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3749 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3750 gen_op_store_slb();
3751 #endif
3752 }
3753
3754 /* mtsrin */
3755 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
3756 PPC_SEGMENT_64B)
3757 {
3758 #if defined(CONFIG_USER_ONLY)
3759 GEN_EXCP_PRIVREG(ctx);
3760 #else
3761 if (unlikely(!ctx->supervisor)) {
3762 GEN_EXCP_PRIVREG(ctx);
3763 return;
3764 }
3765 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3766 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3767 gen_op_srli_T1(28);
3768 gen_op_store_slb();
3769 #endif
3770 }
3771 #endif /* defined(TARGET_PPC64) */
3772
3773 /*** Lookaside buffer management ***/
3774 /* Optional & supervisor only: */
3775 /* tlbia */
3776 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
3777 {
3778 #if defined(CONFIG_USER_ONLY)
3779 GEN_EXCP_PRIVOPC(ctx);
3780 #else
3781 if (unlikely(!ctx->supervisor)) {
3782 GEN_EXCP_PRIVOPC(ctx);
3783 return;
3784 }
3785 gen_op_tlbia();
3786 #endif
3787 }
3788
3789 /* tlbie */
3790 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
3791 {
3792 #if defined(CONFIG_USER_ONLY)
3793 GEN_EXCP_PRIVOPC(ctx);
3794 #else
3795 if (unlikely(!ctx->supervisor)) {
3796 GEN_EXCP_PRIVOPC(ctx);
3797 return;
3798 }
3799 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
3800 #if defined(TARGET_PPC64)
3801 if (ctx->sf_mode)
3802 gen_op_tlbie_64();
3803 else
3804 #endif
3805 gen_op_tlbie();
3806 #endif
3807 }
3808
3809 /* tlbsync */
3810 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
3811 {
3812 #if defined(CONFIG_USER_ONLY)
3813 GEN_EXCP_PRIVOPC(ctx);
3814 #else
3815 if (unlikely(!ctx->supervisor)) {
3816 GEN_EXCP_PRIVOPC(ctx);
3817 return;
3818 }
3819 /* This has no effect: it should ensure that all previous
3820 * tlbie operations have completed
3821 */
3822 GEN_STOP(ctx);
3823 #endif
3824 }
3825
3826 #if defined(TARGET_PPC64)
3827 /* slbia */
3828 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
3829 {
3830 #if defined(CONFIG_USER_ONLY)
3831 GEN_EXCP_PRIVOPC(ctx);
3832 #else
3833 if (unlikely(!ctx->supervisor)) {
3834 GEN_EXCP_PRIVOPC(ctx);
3835 return;
3836 }
3837 gen_op_slbia();
3838 #endif
3839 }
3840
3841 /* slbie */
3842 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
3843 {
3844 #if defined(CONFIG_USER_ONLY)
3845 GEN_EXCP_PRIVOPC(ctx);
3846 #else
3847 if (unlikely(!ctx->supervisor)) {
3848 GEN_EXCP_PRIVOPC(ctx);
3849 return;
3850 }
3851 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
3852 gen_op_slbie();
3853 #endif
3854 }
3855 #endif
3856
3857 /*** External control ***/
3858 /* Optional: */
3859 #define op_eciwx() (*gen_op_eciwx[ctx->mem_idx])()
3860 #define op_ecowx() (*gen_op_ecowx[ctx->mem_idx])()
3861 static GenOpFunc *gen_op_eciwx[NB_MEM_FUNCS] = {
3862 GEN_MEM_FUNCS(eciwx),
3863 };
3864 static GenOpFunc *gen_op_ecowx[NB_MEM_FUNCS] = {
3865 GEN_MEM_FUNCS(ecowx),
3866 };
3867
3868 /* eciwx */
3869 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
3870 {
3871 /* Should check EAR[E] & alignment ! */
3872 gen_addr_reg_index(ctx);
3873 op_eciwx();
3874 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3875 }
3876
3877 /* ecowx */
3878 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
3879 {
3880 /* Should check EAR[E] & alignment ! */
3881 gen_addr_reg_index(ctx);
3882 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
3883 op_ecowx();
3884 }
3885
3886 /* PowerPC 601 specific instructions */
3887 /* abs - abs. */
3888 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
3889 {
3890 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3891 gen_op_POWER_abs();
3892 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3893 if (unlikely(Rc(ctx->opcode) != 0))
3894 gen_set_Rc0(ctx);
3895 }
3896
3897 /* abso - abso. */
3898 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
3899 {
3900 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3901 gen_op_POWER_abso();
3902 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3903 if (unlikely(Rc(ctx->opcode) != 0))
3904 gen_set_Rc0(ctx);
3905 }
3906
3907 /* clcs */
3908 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
3909 {
3910 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3911 gen_op_POWER_clcs();
3912 /* Rc=1 sets CR0 to an undefined state */
3913 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3914 }
3915
3916 /* div - div. */
3917 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
3918 {
3919 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3920 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3921 gen_op_POWER_div();
3922 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3923 if (unlikely(Rc(ctx->opcode) != 0))
3924 gen_set_Rc0(ctx);
3925 }
3926
3927 /* divo - divo. */
3928 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
3929 {
3930 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3931 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3932 gen_op_POWER_divo();
3933 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3934 if (unlikely(Rc(ctx->opcode) != 0))
3935 gen_set_Rc0(ctx);
3936 }
3937
3938 /* divs - divs. */
3939 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
3940 {
3941 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3942 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3943 gen_op_POWER_divs();
3944 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3945 if (unlikely(Rc(ctx->opcode) != 0))
3946 gen_set_Rc0(ctx);
3947 }
3948
3949 /* divso - divso. */
3950 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
3951 {
3952 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3953 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3954 gen_op_POWER_divso();
3955 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3956 if (unlikely(Rc(ctx->opcode) != 0))
3957 gen_set_Rc0(ctx);
3958 }
3959
3960 /* doz - doz. */
3961 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
3962 {
3963 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3964 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3965 gen_op_POWER_doz();
3966 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3967 if (unlikely(Rc(ctx->opcode) != 0))
3968 gen_set_Rc0(ctx);
3969 }
3970
3971 /* dozo - dozo. */
3972 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
3973 {
3974 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3975 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3976 gen_op_POWER_dozo();
3977 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3978 if (unlikely(Rc(ctx->opcode) != 0))
3979 gen_set_Rc0(ctx);
3980 }
3981
3982 /* dozi */
3983 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
3984 {
3985 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3986 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3987 gen_op_POWER_doz();
3988 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3989 }
3990
3991 /* As lscbx loads from memory byte by byte, it is always endian-safe.
3992 * The original POWER is 32 bits only, so define the 64-bit ops as the 32-bit ones
3993 */
3994 #define op_POWER_lscbx(start, ra, rb) \
3995 (*gen_op_POWER_lscbx[ctx->mem_idx])(start, ra, rb)
3996 #define gen_op_POWER_lscbx_64_raw gen_op_POWER_lscbx_raw
3997 #define gen_op_POWER_lscbx_64_user gen_op_POWER_lscbx_user
3998 #define gen_op_POWER_lscbx_64_kernel gen_op_POWER_lscbx_kernel
3999 #define gen_op_POWER_lscbx_64_hypv gen_op_POWER_lscbx_hypv
4000 #define gen_op_POWER_lscbx_le_raw gen_op_POWER_lscbx_raw
4001 #define gen_op_POWER_lscbx_le_user gen_op_POWER_lscbx_user
4002 #define gen_op_POWER_lscbx_le_kernel gen_op_POWER_lscbx_kernel
4003 #define gen_op_POWER_lscbx_le_hypv gen_op_POWER_lscbx_hypv
4004 #define gen_op_POWER_lscbx_le_64_raw gen_op_POWER_lscbx_raw
4005 #define gen_op_POWER_lscbx_le_64_user gen_op_POWER_lscbx_user
4006 #define gen_op_POWER_lscbx_le_64_kernel gen_op_POWER_lscbx_kernel
4007 #define gen_op_POWER_lscbx_le_64_hypv gen_op_POWER_lscbx_hypv
4008 static GenOpFunc3 *gen_op_POWER_lscbx[NB_MEM_FUNCS] = {
4009 GEN_MEM_FUNCS(POWER_lscbx),
4010 };
4011
4012 /* lscbx - lscbx. */
4013 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4014 {
4015 int ra = rA(ctx->opcode);
4016 int rb = rB(ctx->opcode);
4017
4018 gen_addr_reg_index(ctx);
4019 if (ra == 0) {
4020 ra = rb;
4021 }
4022 /* NIP cannot be restored if the memory exception comes from a helper */
4023 gen_update_nip(ctx, ctx->nip - 4);
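/* lscbx transfers at most XER[bc] bytes and stops early when a loaded byte
 * matches the compare byte held in XER[cmp]; the updated byte count is
 * written back to XER[bc] afterwards
 */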
4024 gen_op_load_xer_bc();
4025 gen_op_load_xer_cmp();
4026 op_POWER_lscbx(rD(ctx->opcode), ra, rb);
4027 gen_op_store_xer_bc();
4028 if (unlikely(Rc(ctx->opcode) != 0))
4029 gen_set_Rc0(ctx);
4030 }
4031
4032 /* maskg - maskg. */
4033 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4034 {
4035 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4036 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4037 gen_op_POWER_maskg();
4038 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4039 if (unlikely(Rc(ctx->opcode) != 0))
4040 gen_set_Rc0(ctx);
4041 }
4042
4043 /* maskir - maskir. */
4044 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4045 {
4046 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4047 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4048 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4049 gen_op_POWER_maskir();
4050 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4051 if (unlikely(Rc(ctx->opcode) != 0))
4052 gen_set_Rc0(ctx);
4053 }
4054
4055 /* mul - mul. */
4056 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4057 {
4058 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4059 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4060 gen_op_POWER_mul();
4061 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4062 if (unlikely(Rc(ctx->opcode) != 0))
4063 gen_set_Rc0(ctx);
4064 }
4065
4066 /* mulo - mulo. */
4067 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4068 {
4069 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4070 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4071 gen_op_POWER_mulo();
4072 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4073 if (unlikely(Rc(ctx->opcode) != 0))
4074 gen_set_Rc0(ctx);
4075 }
4076
4077 /* nabs - nabs. */
4078 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4079 {
4080 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4081 gen_op_POWER_nabs();
4082 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4083 if (unlikely(Rc(ctx->opcode) != 0))
4084 gen_set_Rc0(ctx);
4085 }
4086
4087 /* nabso - nabso. */
4088 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4089 {
4090 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4091 gen_op_POWER_nabso();
4092 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4093 if (unlikely(Rc(ctx->opcode) != 0))
4094 gen_set_Rc0(ctx);
4095 }
4096
4097 /* rlmi - rlmi. */
4098 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4099 {
4100 uint32_t mb, me;
4101
4102 mb = MB(ctx->opcode);
4103 me = ME(ctx->opcode);
4104 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4105 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4106 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4107 gen_op_POWER_rlmi(MASK(mb, me), ~MASK(mb, me));
4108 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4109 if (unlikely(Rc(ctx->opcode) != 0))
4110 gen_set_Rc0(ctx);
4111 }
4112
4113 /* rrib - rrib. */
4114 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4115 {
4116 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4117 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4118 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4119 gen_op_POWER_rrib();
4120 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4121 if (unlikely(Rc(ctx->opcode) != 0))
4122 gen_set_Rc0(ctx);
4123 }
4124
4125 /* sle - sle. */
4126 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4127 {
4128 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4129 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4130 gen_op_POWER_sle();
4131 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4132 if (unlikely(Rc(ctx->opcode) != 0))
4133 gen_set_Rc0(ctx);
4134 }
4135
4136 /* sleq - sleq. */
4137 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4138 {
4139 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4140 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4141 gen_op_POWER_sleq();
4142 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4143 if (unlikely(Rc(ctx->opcode) != 0))
4144 gen_set_Rc0(ctx);
4145 }
4146
4147 /* sliq - sliq. */
4148 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4149 {
4150 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4151 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4152 gen_op_POWER_sle();
4153 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4154 if (unlikely(Rc(ctx->opcode) != 0))
4155 gen_set_Rc0(ctx);
4156 }
4157
4158 /* slliq - slliq. */
4159 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4160 {
4161 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4162 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4163 gen_op_POWER_sleq();
4164 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4165 if (unlikely(Rc(ctx->opcode) != 0))
4166 gen_set_Rc0(ctx);
4167 }
4168
4169 /* sllq - sllq. */
4170 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4171 {
4172 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4173 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4174 gen_op_POWER_sllq();
4175 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4176 if (unlikely(Rc(ctx->opcode) != 0))
4177 gen_set_Rc0(ctx);
4178 }
4179
4180 /* slq - slq. */
4181 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4182 {
4183 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4184 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4185 gen_op_POWER_slq();
4186 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4187 if (unlikely(Rc(ctx->opcode) != 0))
4188 gen_set_Rc0(ctx);
4189 }
4190
4191 /* sraiq - sraiq. */
4192 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4193 {
4194 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4195 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4196 gen_op_POWER_sraq();
4197 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4198 if (unlikely(Rc(ctx->opcode) != 0))
4199 gen_set_Rc0(ctx);
4200 }
4201
4202 /* sraq - sraq. */
4203 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4204 {
4205 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4206 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4207 gen_op_POWER_sraq();
4208 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4209 if (unlikely(Rc(ctx->opcode) != 0))
4210 gen_set_Rc0(ctx);
4211 }
4212
4213 /* sre - sre. */
4214 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4215 {
4216 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4217 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4218 gen_op_POWER_sre();
4219 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4220 if (unlikely(Rc(ctx->opcode) != 0))
4221 gen_set_Rc0(ctx);
4222 }
4223
4224 /* srea - srea. */
4225 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4226 {
4227 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4228 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4229 gen_op_POWER_srea();
4230 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4231 if (unlikely(Rc(ctx->opcode) != 0))
4232 gen_set_Rc0(ctx);
4233 }
4234
4235 /* sreq */
4236 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4237 {
4238 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4239 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4240 gen_op_POWER_sreq();
4241 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4242 if (unlikely(Rc(ctx->opcode) != 0))
4243 gen_set_Rc0(ctx);
4244 }
4245
4246 /* sriq */
4247 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
4248 {
4249 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4250 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4251 gen_op_POWER_srq();
4252 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4253 if (unlikely(Rc(ctx->opcode) != 0))
4254 gen_set_Rc0(ctx);
4255 }
4256
4257 /* srliq */
4258 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
4259 {
4260 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4262 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4263 gen_op_POWER_srlq();
4264 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4265 if (unlikely(Rc(ctx->opcode) != 0))
4266 gen_set_Rc0(ctx);
4267 }
4268
4269 /* srlq */
4270 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
4271 {
4272 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4273 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4274 gen_op_POWER_srlq();
4275 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4276 if (unlikely(Rc(ctx->opcode) != 0))
4277 gen_set_Rc0(ctx);
4278 }
4279
4280 /* srq */
4281 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
4282 {
4283 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4284 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4285 gen_op_POWER_srq();
4286 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4287 if (unlikely(Rc(ctx->opcode) != 0))
4288 gen_set_Rc0(ctx);
4289 }
4290
4291 /* PowerPC 602 specific instructions */
4292 /* dsa */
4293 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
4294 {
4295 /* XXX: TODO */
4296 GEN_EXCP_INVAL(ctx);
4297 }
4298
4299 /* esa */
4300 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
4301 {
4302 /* XXX: TODO */
4303 GEN_EXCP_INVAL(ctx);
4304 }
4305
4306 /* mfrom */
4307 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
4308 {
4309 #if defined(CONFIG_USER_ONLY)
4310 GEN_EXCP_PRIVOPC(ctx);
4311 #else
4312 if (unlikely(!ctx->supervisor)) {
4313 GEN_EXCP_PRIVOPC(ctx);
4314 return;
4315 }
4316 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4317 gen_op_602_mfrom();
4318 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4319 #endif
4320 }
4321
4322 /* 602 - 603 - G2 TLB management */
4323 /* tlbld */
4324 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
4325 {
4326 #if defined(CONFIG_USER_ONLY)
4327 GEN_EXCP_PRIVOPC(ctx);
4328 #else
4329 if (unlikely(!ctx->supervisor)) {
4330 GEN_EXCP_PRIVOPC(ctx);
4331 return;
4332 }
4333 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4334 gen_op_6xx_tlbld();
4335 #endif
4336 }
4337
4338 /* tlbli */
4339 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
4340 {
4341 #if defined(CONFIG_USER_ONLY)
4342 GEN_EXCP_PRIVOPC(ctx);
4343 #else
4344 if (unlikely(!ctx->supervisor)) {
4345 GEN_EXCP_PRIVOPC(ctx);
4346 return;
4347 }
4348 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4349 gen_op_6xx_tlbli();
4350 #endif
4351 }
4352
4353 /* 74xx TLB management */
4354 /* tlbld */
4355 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
4356 {
4357 #if defined(CONFIG_USER_ONLY)
4358 GEN_EXCP_PRIVOPC(ctx);
4359 #else
4360 if (unlikely(!ctx->supervisor)) {
4361 GEN_EXCP_PRIVOPC(ctx);
4362 return;
4363 }
4364 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4365 gen_op_74xx_tlbld();
4366 #endif
4367 }
4368
4369 /* tlbli */
4370 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
4371 {
4372 #if defined(CONFIG_USER_ONLY)
4373 GEN_EXCP_PRIVOPC(ctx);
4374 #else
4375 if (unlikely(!ctx->supervisor)) {
4376 GEN_EXCP_PRIVOPC(ctx);
4377 return;
4378 }
4379 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4380 gen_op_74xx_tlbli();
4381 #endif
4382 }
4383
4384 /* POWER instructions not in PowerPC 601 */
4385 /* clf */
4386 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
4387 {
4388 /* Cache line flush: implemented as no-op */
4389 }
4390
4391 /* cli */
4392 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
4393 {
4394 /* Cache line invalidate: privileged and treated as no-op */
4395 #if defined(CONFIG_USER_ONLY)
4396 GEN_EXCP_PRIVOPC(ctx);
4397 #else
4398 if (unlikely(!ctx->supervisor)) {
4399 GEN_EXCP_PRIVOPC(ctx);
4400 return;
4401 }
4402 #endif
4403 }
4404
4405 /* dclst */
4406 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
4407 {
4408 /* Data cache line store: treated as no-op */
4409 }
4410
4411 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
4412 {
4413 #if defined(CONFIG_USER_ONLY)
4414 GEN_EXCP_PRIVOPC(ctx);
4415 #else
4416 int ra = rA(ctx->opcode);
4417 int rd = rD(ctx->opcode);
4418
4419 if (unlikely(!ctx->supervisor)) {
4420 GEN_EXCP_PRIVOPC(ctx);
4421 return;
4422 }
4423 gen_addr_reg_index(ctx);
4424 gen_op_POWER_mfsri();
4425 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
4426 if (ra != 0 && ra != rd)
4427 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
4428 #endif
4429 }
4430
4431 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
4432 {
4433 #if defined(CONFIG_USER_ONLY)
4434 GEN_EXCP_PRIVOPC(ctx);
4435 #else
4436 if (unlikely(!ctx->supervisor)) {
4437 GEN_EXCP_PRIVOPC(ctx);
4438 return;
4439 }
4440 gen_addr_reg_index(ctx);
4441 gen_op_POWER_rac();
4442 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4443 #endif
4444 }
4445
4446 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
4447 {
4448 #if defined(CONFIG_USER_ONLY)
4449 GEN_EXCP_PRIVOPC(ctx);
4450 #else
4451 if (unlikely(!ctx->supervisor)) {
4452 GEN_EXCP_PRIVOPC(ctx);
4453 return;
4454 }
4455 gen_op_POWER_rfsvc();
4456 GEN_SYNC(ctx);
4457 #endif
4458 }
4459
4460 /* svc is not implemented for now */
4461
4462 /* POWER2 specific instructions */
4463 /* Quad manipulation (load/store two floats at a time) */
4464 /* The original POWER2 is 32 bits only, so define the 64-bit ops as the 32-bit ones */
4465 #define op_POWER2_lfq() (*gen_op_POWER2_lfq[ctx->mem_idx])()
4466 #define op_POWER2_stfq() (*gen_op_POWER2_stfq[ctx->mem_idx])()
4467 #define gen_op_POWER2_lfq_64_raw gen_op_POWER2_lfq_raw
4468 #define gen_op_POWER2_lfq_64_user gen_op_POWER2_lfq_user
4469 #define gen_op_POWER2_lfq_64_kernel gen_op_POWER2_lfq_kernel
4470 #define gen_op_POWER2_lfq_64_hypv gen_op_POWER2_lfq_hypv
4471 #define gen_op_POWER2_lfq_le_64_raw gen_op_POWER2_lfq_le_raw
4472 #define gen_op_POWER2_lfq_le_64_user gen_op_POWER2_lfq_le_user
4473 #define gen_op_POWER2_lfq_le_64_kernel gen_op_POWER2_lfq_le_kernel
4474 #define gen_op_POWER2_lfq_le_64_hypv gen_op_POWER2_lfq_le_hypv
4475 #define gen_op_POWER2_stfq_64_raw gen_op_POWER2_stfq_raw
4476 #define gen_op_POWER2_stfq_64_user gen_op_POWER2_stfq_user
4477 #define gen_op_POWER2_stfq_64_kernel gen_op_POWER2_stfq_kernel
4478 #define gen_op_POWER2_stfq_64_hypv gen_op_POWER2_stfq_hypv
4479 #define gen_op_POWER2_stfq_le_64_raw gen_op_POWER2_stfq_le_raw
4480 #define gen_op_POWER2_stfq_le_64_user gen_op_POWER2_stfq_le_user
4481 #define gen_op_POWER2_stfq_le_64_kernel gen_op_POWER2_stfq_le_kernel
4482 #define gen_op_POWER2_stfq_le_64_hypv gen_op_POWER2_stfq_le_hypv
4483 static GenOpFunc *gen_op_POWER2_lfq[NB_MEM_FUNCS] = {
4484 GEN_MEM_FUNCS(POWER2_lfq),
4485 };
4486 static GenOpFunc *gen_op_POWER2_stfq[NB_MEM_FUNCS] = {
4487 GEN_MEM_FUNCS(POWER2_stfq),
4488 };
4489
4490 /* lfq */
4491 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4492 {
4493 /* NIP cannot be restored if the memory exception comes from a helper */
4494 gen_update_nip(ctx, ctx->nip - 4);
4495 gen_addr_imm_index(ctx, 0);
4496 op_POWER2_lfq();
4497 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4498 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4499 }
4500
4501 /* lfqu */
4502 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4503 {
4504 int ra = rA(ctx->opcode);
4505
4506 /* NIP cannot be restored if the memory exception comes from a helper */
4507 gen_update_nip(ctx, ctx->nip - 4);
4508 gen_addr_imm_index(ctx, 0);
4509 op_POWER2_lfq();
4510 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4511 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4512 if (ra != 0)
4513 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4514 }
4515
4516 /* lfqux */
4517 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
4518 {
4519 int ra = rA(ctx->opcode);
4520
4521 /* NIP cannot be restored if the memory exception comes from a helper */
4522 gen_update_nip(ctx, ctx->nip - 4);
4523 gen_addr_reg_index(ctx);
4524 op_POWER2_lfq();
4525 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4526 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4527 if (ra != 0)
4528 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4529 }
4530
4531 /* lfqx */
4532 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
4533 {
4534 /* NIP cannot be restored if the memory exception comes from a helper */
4535 gen_update_nip(ctx, ctx->nip - 4);
4536 gen_addr_reg_index(ctx);
4537 op_POWER2_lfq();
4538 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4539 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4540 }
4541
4542 /* stfq */
4543 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4544 {
4545 /* NIP cannot be restored if the memory exception comes from a helper */
4546 gen_update_nip(ctx, ctx->nip - 4);
4547 gen_addr_imm_index(ctx, 0);
4548 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4549 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4550 op_POWER2_stfq();
4551 }
4552
4553 /* stfqu */
4554 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4555 {
4556 int ra = rA(ctx->opcode);
4557
4558 /* NIP cannot be restored if the memory exception comes from a helper */
4559 gen_update_nip(ctx, ctx->nip - 4);
4560 gen_addr_imm_index(ctx, 0);
4561 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4562 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4563 op_POWER2_stfq();
4564 if (ra != 0)
4565 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4566 }
4567
4568 /* stfqux */
4569 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
4570 {
4571 int ra = rA(ctx->opcode);
4572
4573 /* NIP cannot be restored if the memory exception comes from a helper */
4574 gen_update_nip(ctx, ctx->nip - 4);
4575 gen_addr_reg_index(ctx);
4576 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4577 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4578 op_POWER2_stfq();
4579 if (ra != 0)
4580 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4581 }
4582
4583 /* stfqx */
4584 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
4585 {
4586 /* NIP cannot be restored if the memory exception comes from a helper */
4587 gen_update_nip(ctx, ctx->nip - 4);
4588 gen_addr_reg_index(ctx);
4589 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4590 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4591 op_POWER2_stfq();
4592 }
4593
4594 /* BookE specific instructions */
4595 /* XXX: not implemented on 440 ? */
4596 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
4597 {
4598 /* XXX: TODO */
4599 GEN_EXCP_INVAL(ctx);
4600 }
4601
4602 /* XXX: not implemented on 440 ? */
4603 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
4604 {
4605 #if defined(CONFIG_USER_ONLY)
4606 GEN_EXCP_PRIVOPC(ctx);
4607 #else
4608 if (unlikely(!ctx->supervisor)) {
4609 GEN_EXCP_PRIVOPC(ctx);
4610 return;
4611 }
4612 gen_addr_reg_index(ctx);
4613 /* Use the same micro-ops as for tlbie */
4614 #if defined(TARGET_PPC64)
4615 if (ctx->sf_mode)
4616 gen_op_tlbie_64();
4617 else
4618 #endif
4619 gen_op_tlbie();
4620 #endif
4621 }
4622
4623 /* All 405 MAC instructions are translated here */
4624 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
4625 int opc2, int opc3,
4626 int ra, int rb, int rt, int Rc)
4627 {
4628 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[ra]);
4629 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
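/* opc3 bit 0 selects a signed (set) or unsigned multiply; bits 2-3 select
 * which 16-bit halves of rA and rB feed the multiplier: both high halves,
 * the cross high/low combination, or both low halves
 */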
4630 switch (opc3 & 0x0D) {
4631 case 0x05:
4632 /* macchw - macchw. - macchwo - macchwo. */
4633 /* macchws - macchws. - macchwso - macchwso. */
4634 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
4635 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
4636 /* mulchw - mulchw. */
4637 gen_op_405_mulchw();
4638 break;
4639 case 0x04:
4640 /* macchwu - macchwu. - macchwuo - macchwuo. */
4641 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
4642 /* mulchwu - mulchwu. */
4643 gen_op_405_mulchwu();
4644 break;
4645 case 0x01:
4646 /* machhw - machhw. - machhwo - machhwo. */
4647 /* machhws - machhws. - machhwso - machhwso. */
4648 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
4649 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
4650 /* mulhhw - mulhhw. */
4651 gen_op_405_mulhhw();
4652 break;
4653 case 0x00:
4654 /* machhwu - machhwu. - machhwuo - machhwuo. */
4655 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
4656 /* mulhhwu - mulhhwu. */
4657 gen_op_405_mulhhwu();
4658 break;
4659 case 0x0D:
4660 /* maclhw - maclhw. - maclhwo - maclhwo. */
4661 /* maclhws - maclhws. - maclhwso - maclhwso. */
4662 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
4663 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
4664 /* mullhw - mullhw. */
4665 gen_op_405_mullhw();
4666 break;
4667 case 0x0C:
4668 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
4669 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
4670 /* mullhwu - mullhwu. */
4671 gen_op_405_mullhwu();
4672 break;
4673 }
4674 if (opc2 & 0x02) {
4675 /* nmultiply-and-accumulate (0x0E) */
4676 gen_op_neg();
4677 }
4678 if (opc2 & 0x04) {
4679 /* (n)multiply-and-accumulate (0x0C - 0x0E) */
4680 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rt]);
4681 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4682 gen_op_405_add_T0_T2();
4683 }
4684 if (opc3 & 0x10) {
4685 /* Check overflow */
4686 if (opc3 & 0x01)
4687 gen_op_check_addo();
4688 else
4689 gen_op_405_check_ovu();
4690 }
4691 if (opc3 & 0x02) {
4692 /* Saturate */
4693 if (opc3 & 0x01)
4694 gen_op_405_check_sat();
4695 else
4696 gen_op_405_check_satu();
4697 }
4698 tcg_gen_mov_tl(cpu_gpr[rt], cpu_T[0]);
4699 if (unlikely(Rc != 0)) {
4700 /* Update Rc0 */
4701 gen_set_Rc0(ctx);
4702 }
4703 }
4704
4705 #define GEN_MAC_HANDLER(name, opc2, opc3) \
4706 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
4707 { \
4708 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
4709 rD(ctx->opcode), Rc(ctx->opcode)); \
4710 }
4711
4712 /* macchw - macchw. */
4713 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4714 /* macchwo - macchwo. */
4715 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4716 /* macchws - macchws. */
4717 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4718 /* macchwso - macchwso. */
4719 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4720 /* macchwsu - macchwsu. */
4721 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4722 /* macchwsuo - macchwsuo. */
4723 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4724 /* macchwu - macchwu. */
4725 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4726 /* macchwuo - macchwuo. */
4727 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4728 /* machhw - machhw. */
4729 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4730 /* machhwo - machhwo. */
4731 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4732 /* machhws - machhws. */
4733 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4734 /* machhwso - machhwso. */
4735 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4736 /* machhwsu - machhwsu. */
4737 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4738 /* machhwsuo - machhwsuo. */
4739 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4740 /* machhwu - machhwu. */
4741 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4742 /* machhwuo - machhwuo. */
4743 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4744 /* maclhw - maclhw. */
4745 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4746 /* maclhwo - maclhwo. */
4747 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4748 /* maclhws - maclhws. */
4749 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4750 /* maclhwso - maclhwso. */
4751 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4752 /* maclhwu - maclhwu. */
4753 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4754 /* maclhwuo - maclhwuo. */
4755 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4756 /* maclhwsu - maclhwsu. */
4757 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4758 /* maclhwsuo - maclhwsuo. */
4759 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4760 /* nmacchw - nmacchw. */
4761 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
4762 /* nmacchwo - nmacchwo. */
4763 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
4764 /* nmacchws - nmacchws. */
4765 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
4766 /* nmacchwso - nmacchwso. */
4767 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
4768 /* nmachhw - nmachhw. */
4769 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
4770 /* nmachhwo - nmachhwo. */
4771 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
4772 /* nmachhws - nmachhws. */
4773 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
4774 /* nmachhwso - nmachhwso. */
4775 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
4776 /* nmaclhw - nmaclhw. */
4777 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
4778 /* nmaclhwo - nmaclhwo. */
4779 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
4780 /* nmaclhws - nmaclhws. */
4781 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
4782 /* nmaclhwso - nmaclhwso. */
4783 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
4784
4785 /* mulchw - mulchw. */
4786 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
4787 /* mulchwu - mulchwu. */
4788 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
4789 /* mulhhw - mulhhw. */
4790 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
4791 /* mulhhwu - mulhhwu. */
4792 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
4793 /* mullhw - mullhw. */
4794 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
4795 /* mullhwu - mullhwu. */
4796 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
4797
4798 /* mfdcr */
4799 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
4800 {
4801 #if defined(CONFIG_USER_ONLY)
4802 GEN_EXCP_PRIVREG(ctx);
4803 #else
4804 uint32_t dcrn = SPR(ctx->opcode);
4805
4806 if (unlikely(!ctx->supervisor)) {
4807 GEN_EXCP_PRIVREG(ctx);
4808 return;
4809 }
4810 tcg_gen_movi_tl(cpu_T[0], dcrn);
4811 gen_op_load_dcr();
4812 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4813 #endif
4814 }
4815
4816 /* mtdcr */
4817 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
4818 {
4819 #if defined(CONFIG_USER_ONLY)
4820 GEN_EXCP_PRIVREG(ctx);
4821 #else
4822 uint32_t dcrn = SPR(ctx->opcode);
4823
4824 if (unlikely(!ctx->supervisor)) {
4825 GEN_EXCP_PRIVREG(ctx);
4826 return;
4827 }
4828 tcg_gen_movi_tl(cpu_T[0], dcrn);
4829 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4830 gen_op_store_dcr();
4831 #endif
4832 }
4833
4834 /* mfdcrx */
4835 /* XXX: not implemented on 440 ? */
4836 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
4837 {
4838 #if defined(CONFIG_USER_ONLY)
4839 GEN_EXCP_PRIVREG(ctx);
4840 #else
4841 if (unlikely(!ctx->supervisor)) {
4842 GEN_EXCP_PRIVREG(ctx);
4843 return;
4844 }
4845 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4846 gen_op_load_dcr();
4847 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4848 /* Note: setting the Rc update flag leaves Rc0 in an undefined state */
4849 #endif
4850 }
4851
4852 /* mtdcrx */
4853 /* XXX: not implemented on 440 ? */
4854 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
4855 {
4856 #if defined(CONFIG_USER_ONLY)
4857 GEN_EXCP_PRIVREG(ctx);
4858 #else
4859 if (unlikely(!ctx->supervisor)) {
4860 GEN_EXCP_PRIVREG(ctx);
4861 return;
4862 }
4863 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4864 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4865 gen_op_store_dcr();
4866 /* Note: setting the Rc update flag leaves Rc0 in an undefined state */
4867 #endif
4868 }
4869
4870 /* mfdcrux (PPC 460) : user-mode access to DCR */
4871 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
4872 {
4873 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4874 gen_op_load_dcr();
4875 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4876 /* Note: setting the Rc update flag leaves Rc0 in an undefined state */
4877 }
4878
4879 /* mtdcrux (PPC 460) : user-mode access to DCR */
4880 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
4881 {
4882 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4883 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4884 gen_op_store_dcr();
4885 /* Note: setting the Rc update flag leaves Rc0 in an undefined state */
4886 }
4887
4888 /* dccci */
4889 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
4890 {
4891 #if defined(CONFIG_USER_ONLY)
4892 GEN_EXCP_PRIVOPC(ctx);
4893 #else
4894 if (unlikely(!ctx->supervisor)) {
4895 GEN_EXCP_PRIVOPC(ctx);
4896 return;
4897 }
4898 /* interpreted as no-op */
4899 #endif
4900 }
4901
4902 /* dcread */
4903 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
4904 {
4905 #if defined(CONFIG_USER_ONLY)
4906 GEN_EXCP_PRIVOPC(ctx);
4907 #else
4908 if (unlikely(!ctx->supervisor)) {
4909 GEN_EXCP_PRIVOPC(ctx);
4910 return;
4911 }
4912 gen_addr_reg_index(ctx);
4913 op_ldst(lwz);
4914 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4915 #endif
4916 }
4917
4918 /* icbt */
4919 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
4920 {
4921 /* interpreted as no-op */
4922 /* XXX: the specification says this is treated as a load by the MMU
4923 * but does not generate any exception
4924 */
4925 }
4926
4927 /* iccci */
4928 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
4929 {
4930 #if defined(CONFIG_USER_ONLY)
4931 GEN_EXCP_PRIVOPC(ctx);
4932 #else
4933 if (unlikely(!ctx->supervisor)) {
4934 GEN_EXCP_PRIVOPC(ctx);
4935 return;
4936 }
4937 /* interpreted as no-op */
4938 #endif
4939 }
4940
4941 /* icread */
4942 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
4943 {
4944 #if defined(CONFIG_USER_ONLY)
4945 GEN_EXCP_PRIVOPC(ctx);
4946 #else
4947 if (unlikely(!ctx->supervisor)) {
4948 GEN_EXCP_PRIVOPC(ctx);
4949 return;
4950 }
4951 /* interpreted as no-op */
4952 #endif
4953 }
4954
4955 /* rfci (supervisor only) */
4956 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
4957 {
4958 #if defined(CONFIG_USER_ONLY)
4959 GEN_EXCP_PRIVOPC(ctx);
4960 #else
4961 if (unlikely(!ctx->supervisor)) {
4962 GEN_EXCP_PRIVOPC(ctx);
4963 return;
4964 }
4965 /* Restore CPU state */
4966 gen_op_40x_rfci();
4967 GEN_SYNC(ctx);
4968 #endif
4969 }
4970
4971 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
4972 {
4973 #if defined(CONFIG_USER_ONLY)
4974 GEN_EXCP_PRIVOPC(ctx);
4975 #else
4976 if (unlikely(!ctx->supervisor)) {
4977 GEN_EXCP_PRIVOPC(ctx);
4978 return;
4979 }
4980 /* Restore CPU state */
4981 gen_op_rfci();
4982 GEN_SYNC(ctx);
4983 #endif
4984 }
4985
4986 /* BookE specific */
4987 /* XXX: not implemented on 440 ? */
4988 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
4989 {
4990 #if defined(CONFIG_USER_ONLY)
4991 GEN_EXCP_PRIVOPC(ctx);
4992 #else
4993 if (unlikely(!ctx->supervisor)) {
4994 GEN_EXCP_PRIVOPC(ctx);
4995 return;
4996 }
4997 /* Restore CPU state */
4998 gen_op_rfdi();
4999 GEN_SYNC(ctx);
5000 #endif
5001 }
5002
5003 /* XXX: not implemented on 440 ? */
5004 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5005 {
5006 #if defined(CONFIG_USER_ONLY)
5007 GEN_EXCP_PRIVOPC(ctx);
5008 #else
5009 if (unlikely(!ctx->supervisor)) {
5010 GEN_EXCP_PRIVOPC(ctx);
5011 return;
5012 }
5013 /* Restore CPU state */
5014 gen_op_rfmci();
5015 GEN_SYNC(ctx);
5016 #endif
5017 }
5018
5019 /* TLB management - PowerPC 405 implementation */
5020 /* tlbre */
5021 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5022 {
5023 #if defined(CONFIG_USER_ONLY)
5024 GEN_EXCP_PRIVOPC(ctx);
5025 #else
5026 if (unlikely(!ctx->supervisor)) {
5027 GEN_EXCP_PRIVOPC(ctx);
5028 return;
5029 }
5030 switch (rB(ctx->opcode)) {
5031 case 0:
5032 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5033 gen_op_4xx_tlbre_hi();
5034 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5035 break;
5036 case 1:
5037 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5038 gen_op_4xx_tlbre_lo();
5039 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5040 break;
5041 default:
5042 GEN_EXCP_INVAL(ctx);
5043 break;
5044 }
5045 #endif
5046 }
5047
5048 /* tlbsx - tlbsx. */
5049 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5050 {
5051 #if defined(CONFIG_USER_ONLY)
5052 GEN_EXCP_PRIVOPC(ctx);
5053 #else
5054 if (unlikely(!ctx->supervisor)) {
5055 GEN_EXCP_PRIVOPC(ctx);
5056 return;
5057 }
5058 gen_addr_reg_index(ctx);
5059 gen_op_4xx_tlbsx();
5060 if (Rc(ctx->opcode))
5061 gen_op_4xx_tlbsx_check();
5062 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5063 #endif
5064 }
5065
5066 /* tlbwe */
5067 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5068 {
5069 #if defined(CONFIG_USER_ONLY)
5070 GEN_EXCP_PRIVOPC(ctx);
5071 #else
5072 if (unlikely(!ctx->supervisor)) {
5073 GEN_EXCP_PRIVOPC(ctx);
5074 return;
5075 }
5076 switch (rB(ctx->opcode)) {
5077 case 0:
5078 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5079 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5080 gen_op_4xx_tlbwe_hi();
5081 break;
5082 case 1:
5083 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5084 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5085 gen_op_4xx_tlbwe_lo();
5086 break;
5087 default:
5088 GEN_EXCP_INVAL(ctx);
5089 break;
5090 }
5091 #endif
5092 }
5093
5094 /* TLB management - PowerPC 440 implementation */
5095 /* tlbre */
5096 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5097 {
5098 #if defined(CONFIG_USER_ONLY)
5099 GEN_EXCP_PRIVOPC(ctx);
5100 #else
5101 if (unlikely(!ctx->supervisor)) {
5102 GEN_EXCP_PRIVOPC(ctx);
5103 return;
5104 }
5105 switch (rB(ctx->opcode)) {
5106 case 0:
5107 case 1:
5108 case 2:
5109 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5110 gen_op_440_tlbre(rB(ctx->opcode));
5111 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5112 break;
5113 default:
5114 GEN_EXCP_INVAL(ctx);
5115 break;
5116 }
5117 #endif
5118 }
5119
5120 /* tlbsx - tlbsx. */
5121 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5122 {
5123 #if defined(CONFIG_USER_ONLY)
5124 GEN_EXCP_PRIVOPC(ctx);
5125 #else
5126 if (unlikely(!ctx->supervisor)) {
5127 GEN_EXCP_PRIVOPC(ctx);
5128 return;
5129 }
5130 gen_addr_reg_index(ctx);
5131 gen_op_440_tlbsx();
5132 if (Rc(ctx->opcode))
5133 gen_op_4xx_tlbsx_check();
5134 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5135 #endif
5136 }
5137
5138 /* tlbwe */
5139 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
5140 {
5141 #if defined(CONFIG_USER_ONLY)
5142 GEN_EXCP_PRIVOPC(ctx);
5143 #else
5144 if (unlikely(!ctx->supervisor)) {
5145 GEN_EXCP_PRIVOPC(ctx);
5146 return;
5147 }
5148 switch (rB(ctx->opcode)) {
5149 case 0:
5150 case 1:
5151 case 2:
5152 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5153 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5154 gen_op_440_tlbwe(rB(ctx->opcode));
5155 break;
5156 default:
5157 GEN_EXCP_INVAL(ctx);
5158 break;
5159 }
5160 #endif
5161 }
5162
5163 /* wrtee */
5164 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
5165 {
5166 #if defined(CONFIG_USER_ONLY)
5167 GEN_EXCP_PRIVOPC(ctx);
5168 #else
5169 if (unlikely(!ctx->supervisor)) {
5170 GEN_EXCP_PRIVOPC(ctx);
5171 return;
5172 }
5173 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rD(ctx->opcode)]);
5174 gen_op_wrte();
5175 /* Stop translation to have a chance to raise an exception
5176 * if we just set msr_ee to 1
5177 */
5178 GEN_STOP(ctx);
5179 #endif
5180 }
5181
5182 /* wrteei */
5183 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
5184 {
5185 #if defined(CONFIG_USER_ONLY)
5186 GEN_EXCP_PRIVOPC(ctx);
5187 #else
5188 if (unlikely(!ctx->supervisor)) {
5189 GEN_EXCP_PRIVOPC(ctx);
5190 return;
5191 }
5192 tcg_gen_movi_tl(cpu_T[0], ctx->opcode & 0x00010000);
5193 gen_op_wrte();
5194 /* Stop translation to have a chance to raise an exception
5195 * if we just set msr_ee to 1
5196 */
5197 GEN_STOP(ctx);
5198 #endif
5199 }
5200
5201 /* PowerPC 440 specific instructions */
5202 /* dlmzb */
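/* dlmzb scans the doubleword formed by rS:rB for the leftmost zero byte;
 * the resulting byte count is written to rA and to the XER byte count field
 * and, with Rc=1, reflected in CR0
 */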
5203 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
5204 {
5205 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
5206 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5207 gen_op_440_dlmzb();
5208 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
5209 gen_op_store_xer_bc();
5210 if (Rc(ctx->opcode)) {
5211 gen_op_440_dlmzb_update_Rc();
5212 tcg_gen_andi_i32(cpu_crf[0], cpu_T[0], 0xf);
5213 }
5214 }
5215
5216 /* mbar replaces eieio on 440 */
5217 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
5218 {
5219 /* interpreted as no-op */
5220 }
5221
5222 /* msync replaces sync on 440 */
5223 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
5224 {
5225 /* interpreted as no-op */
5226 }
5227
5228 /* icbt */
5229 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
5230 {
5231 /* interpreted as no-op */
5232 /* XXX: the specification says this is treated as a load by the MMU
5233 * but does not generate any exception
5234 */
5235 }
5236
5237 /*** Altivec vector extension ***/
5238 /* Altivec registers moves */
5239
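/* Each 128-bit Altivec register is modelled as two 64-bit halves (avrh/avrl);
 * the helpers below copy both halves between the architectural registers and
 * the AVR scratch registers used by the micro-ops
 */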
5240 static always_inline void gen_load_avr(int t, int reg) {
5241 tcg_gen_mov_i64(cpu_AVRh[t], cpu_avrh[reg]);
5242 tcg_gen_mov_i64(cpu_AVRl[t], cpu_avrl[reg]);
5243 }
5244
5245 static always_inline void gen_store_avr(int reg, int t) {
5246 tcg_gen_mov_i64(cpu_avrh[reg], cpu_AVRh[t]);
5247 tcg_gen_mov_i64(cpu_avrl[reg], cpu_AVRl[t]);
5248 }
5249
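/* Vector loads and stores are dispatched through a table indexed by ctx->mem_idx, which selects the generated memory op matching the current access mode */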
5250 #define op_vr_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5251 #define OP_VR_LD_TABLE(name) \
5252 static GenOpFunc *gen_op_vr_l##name[NB_MEM_FUNCS] = { \
5253 GEN_MEM_FUNCS(vr_l##name), \
5254 };
5255 #define OP_VR_ST_TABLE(name) \
5256 static GenOpFunc *gen_op_vr_st##name[NB_MEM_FUNCS] = { \
5257 GEN_MEM_FUNCS(vr_st##name), \
5258 };
5259
5260 #define GEN_VR_LDX(name, opc2, opc3) \
5261 GEN_HANDLER(l##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5262 { \
5263 if (unlikely(!ctx->altivec_enabled)) { \
5264 GEN_EXCP_NO_VR(ctx); \
5265 return; \
5266 } \
5267 gen_addr_reg_index(ctx); \
5268 op_vr_ldst(vr_l##name); \
5269 gen_store_avr(rD(ctx->opcode), 0); \
5270 }
5271
5272 #define GEN_VR_STX(name, opc2, opc3) \
5273 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5274 { \
5275 if (unlikely(!ctx->altivec_enabled)) { \
5276 GEN_EXCP_NO_VR(ctx); \
5277 return; \
5278 } \
5279 gen_addr_reg_index(ctx); \
5280 gen_load_avr(0, rS(ctx->opcode)); \
5281 op_vr_ldst(vr_st##name); \
5282 }
5283
5284 OP_VR_LD_TABLE(vx);
5285 GEN_VR_LDX(vx, 0x07, 0x03);
5286 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
5287 #define gen_op_vr_lvxl gen_op_vr_lvx
5288 GEN_VR_LDX(vxl, 0x07, 0x0B);
5289
5290 OP_VR_ST_TABLE(vx);
5291 GEN_VR_STX(vx, 0x07, 0x07);
5292 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
5293 #define gen_op_vr_stvxl gen_op_vr_stvx
5294 GEN_VR_STX(vxl, 0x07, 0x0F);
5295
5296 /*** SPE extension ***/
5297 /* Register moves */
5298
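/* On 32-bit targets a 64-bit SPE value is split across two 32-bit registers:
 * gprh holds the upper word and gpr the lower one; the helpers below
 * reassemble and split it
 */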
5299 static always_inline void gen_load_gpr64(TCGv t, int reg) {
5300 #if defined(TARGET_PPC64)
5301 tcg_gen_mov_i64(t, cpu_gpr[reg]);
5302 #else
5303 tcg_gen_extu_i32_i64(t, cpu_gprh[reg]);
5304 tcg_gen_shli_i64(t, t, 32);
5305 TCGv tmp = tcg_temp_local_new(TCG_TYPE_I64);
5306 tcg_gen_extu_i32_i64(tmp, cpu_gpr[reg]);
5307 tcg_gen_or_i64(t, t, tmp);
5308 tcg_temp_free(tmp);
5309 #endif
5310 }
5311
5312 static always_inline void gen_store_gpr64(int reg, TCGv t) {
5313 #if defined(TARGET_PPC64)
5314 tcg_gen_mov_i64(cpu_gpr[reg], t);
5315 #else
5316 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
5317 TCGv tmp = tcg_temp_local_new(TCG_TYPE_I64);
5318 tcg_gen_shri_i64(tmp, t, 32);
5319 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
5320 tcg_temp_free(tmp);
5321 #endif
5322 }
5323
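/* Each GEN_SPE entry covers two SPE opcodes that share opc2/opc3 and are distinguished by the bit tested by Rc() */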
5324 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
5325 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
5326 { \
5327 if (Rc(ctx->opcode)) \
5328 gen_##name1(ctx); \
5329 else \
5330 gen_##name0(ctx); \
5331 }
5332
5333 /* Handler for undefined SPE opcodes */
5334 static always_inline void gen_speundef (DisasContext *ctx)
5335 {
5336 GEN_EXCP_INVAL(ctx);
5337 }
5338
5339 /* SPE load and stores */
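/* The immediate form encodes a 5-bit unsigned offset in the rB field; it is
 * scaled by the access size (1 << sh bytes) and added to (rA), or used as an
 * absolute address when rA is 0
 */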
5340 static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
5341 {
5342 target_long simm = rB(ctx->opcode);
5343
5344 if (rA(ctx->opcode) == 0) {
5345 tcg_gen_movi_tl(cpu_T[0], simm << sh);
5346 } else {
5347 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5348 if (likely(simm != 0))
5349 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm << sh);
5350 }
5351 }
5352
5353 #define op_spe_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5354 #define OP_SPE_LD_TABLE(name) \
5355 static GenOpFunc *gen_op_spe_l##name[NB_MEM_FUNCS] = { \
5356 GEN_MEM_FUNCS(spe_l##name), \
5357 };
5358 #define OP_SPE_ST_TABLE(name) \
5359 static GenOpFunc *gen_op_spe_st##name[NB_MEM_FUNCS] = { \
5360 GEN_MEM_FUNCS(spe_st##name), \
5361 };
5362
5363 #define GEN_SPE_LD(name, sh) \
5364 static always_inline void gen_evl##name (DisasContext *ctx) \
5365 { \
5366 if (unlikely(!ctx->spe_enabled)) { \
5367 GEN_EXCP_NO_AP(ctx); \
5368 return; \
5369 } \
5370 gen_addr_spe_imm_index(ctx, sh); \
5371 op_spe_ldst(spe_l##name); \
5372 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5373 }
5374
5375 #define GEN_SPE_LDX(name) \
5376 static always_inline void gen_evl##name##x (DisasContext *ctx) \
5377 { \
5378 if (unlikely(!ctx->spe_enabled)) { \
5379 GEN_EXCP_NO_AP(ctx); \
5380 return; \
5381 } \
5382 gen_addr_reg_index(ctx); \
5383 op_spe_ldst(spe_l##name); \
5384 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5385 }
5386
5387 #define GEN_SPEOP_LD(name, sh) \
5388 OP_SPE_LD_TABLE(name); \
5389 GEN_SPE_LD(name, sh); \
5390 GEN_SPE_LDX(name)
5391
5392 #define GEN_SPE_ST(name, sh) \
5393 static always_inline void gen_evst##name (DisasContext *ctx) \
5394 { \
5395 if (unlikely(!ctx->spe_enabled)) { \
5396 GEN_EXCP_NO_AP(ctx); \
5397 return; \
5398 } \
5399 gen_addr_spe_imm_index(ctx, sh); \
5400 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5401 op_spe_ldst(spe_st##name); \
5402 }
5403
5404 #define GEN_SPE_STX(name) \
5405 static always_inline void gen_evst##name##x (DisasContext *ctx) \
5406 { \
5407 if (unlikely(!ctx->spe_enabled)) { \
5408 GEN_EXCP_NO_AP(ctx); \
5409 return; \
5410 } \
5411 gen_addr_reg_index(ctx); \
5412 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5413 op_spe_ldst(spe_st##name); \
5414 }
5415
5416 #define GEN_SPEOP_ST(name, sh) \
5417 OP_SPE_ST_TABLE(name); \
5418 GEN_SPE_ST(name, sh); \
5419 GEN_SPE_STX(name)
5420
5421 #define GEN_SPEOP_LDST(name, sh) \
5422 GEN_SPEOP_LD(name, sh); \
5423 GEN_SPEOP_ST(name, sh)
5424
5425 /* SPE arithmetic and logic */
5426 #define GEN_SPEOP_ARITH2(name) \
5427 static always_inline void gen_##name (DisasContext *ctx) \
5428 { \
5429 if (unlikely(!ctx->spe_enabled)) { \
5430 GEN_EXCP_NO_AP(ctx); \
5431 return; \
5432 } \
5433 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5434 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5435 gen_op_##name(); \
5436 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5437 }
5438
5439 #define GEN_SPEOP_ARITH1(name) \
5440 static always_inline void gen_##name (DisasContext *ctx) \
5441 { \
5442 if (unlikely(!ctx->spe_enabled)) { \
5443 GEN_EXCP_NO_AP(ctx); \
5444 return; \
5445 } \
5446 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5447 gen_op_##name(); \
5448 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5449 }
5450
5451 #define GEN_SPEOP_COMP(name) \
5452 static always_inline void gen_##name (DisasContext *ctx) \
5453 { \
5454 if (unlikely(!ctx->spe_enabled)) { \
5455 GEN_EXCP_NO_AP(ctx); \
5456 return; \
5457 } \
5458 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5459 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5460 gen_op_##name(); \
5461 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
5462 }
5463
5464 /* Logical */
5465 GEN_SPEOP_ARITH2(evand);
5466 GEN_SPEOP_ARITH2(evandc);
5467 GEN_SPEOP_ARITH2(evxor);
5468 GEN_SPEOP_ARITH2(evor);
5469 GEN_SPEOP_ARITH2(evnor);
5470 GEN_SPEOP_ARITH2(eveqv);
5471 GEN_SPEOP_ARITH2(evorc);
5472 GEN_SPEOP_ARITH2(evnand);
5473 GEN_SPEOP_ARITH2(evsrwu);
5474 GEN_SPEOP_ARITH2(evsrws);
5475 GEN_SPEOP_ARITH2(evslw);
5476 GEN_SPEOP_ARITH2(evrlw);
5477 GEN_SPEOP_ARITH2(evmergehi);
5478 GEN_SPEOP_ARITH2(evmergelo);
5479 GEN_SPEOP_ARITH2(evmergehilo);
5480 GEN_SPEOP_ARITH2(evmergelohi);
5481
5482 /* Arithmetic */
5483 GEN_SPEOP_ARITH2(evaddw);
5484 GEN_SPEOP_ARITH2(evsubfw);
5485 GEN_SPEOP_ARITH1(evabs);
5486 GEN_SPEOP_ARITH1(evneg);
5487 GEN_SPEOP_ARITH1(evextsb);
5488 GEN_SPEOP_ARITH1(evextsh);
5489 GEN_SPEOP_ARITH1(evrndw);
5490 GEN_SPEOP_ARITH1(evcntlzw);
5491 GEN_SPEOP_ARITH1(evcntlsw);
5492 static always_inline void gen_brinc (DisasContext *ctx)
5493 {
5494 /* Note: brinc is usable even if SPE is disabled */
5495 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5496 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5497 gen_op_brinc();
5498 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5499 }
5500
5501 #define GEN_SPEOP_ARITH_IMM2(name) \
5502 static always_inline void gen_##name##i (DisasContext *ctx) \
5503 { \
5504 if (unlikely(!ctx->spe_enabled)) { \
5505 GEN_EXCP_NO_AP(ctx); \
5506 return; \
5507 } \
5508 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5509 gen_op_splatwi_T1_64(rA(ctx->opcode)); \
5510 gen_op_##name(); \
5511 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5512 }
5513
5514 #define GEN_SPEOP_LOGIC_IMM2(name) \
5515 static always_inline void gen_##name##i (DisasContext *ctx) \
5516 { \
5517 if (unlikely(!ctx->spe_enabled)) { \
5518 GEN_EXCP_NO_AP(ctx); \
5519 return; \
5520 } \
5521 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5522 gen_op_splatwi_T1_64(rB(ctx->opcode)); \
5523 gen_op_##name(); \
5524 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5525 }
5526
5527 GEN_SPEOP_ARITH_IMM2(evaddw);
5528 #define gen_evaddiw gen_evaddwi
5529 GEN_SPEOP_ARITH_IMM2(evsubfw);
5530 #define gen_evsubifw gen_evsubfwi
5531 GEN_SPEOP_LOGIC_IMM2(evslw);
5532 GEN_SPEOP_LOGIC_IMM2(evsrwu);
5533 #define gen_evsrwis gen_evsrwsi
5534 GEN_SPEOP_LOGIC_IMM2(evsrws);
5535 #define gen_evsrwiu gen_evsrwui
5536 GEN_SPEOP_LOGIC_IMM2(evrlw);
5537
5538 static always_inline void gen_evsplati (DisasContext *ctx)
5539 {
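/* Sign-extend the 5-bit immediate held in the rA field */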
5540 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
5541
5542 gen_op_splatwi_T0_64(imm);
5543 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5544 }
5545
5546 static always_inline void gen_evsplatfi (DisasContext *ctx)
5547 {
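/* The 5-bit immediate from the rA field is placed in the most significant bits of each word (fractional splat) */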
5548 uint32_t imm = rA(ctx->opcode) << 27;
5549
5550 gen_op_splatwi_T0_64(imm);
5551 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5552 }
5553
5554 /* Comparison */
5555 GEN_SPEOP_COMP(evcmpgtu);
5556 GEN_SPEOP_COMP(evcmpgts);
5557 GEN_SPEOP_COMP(evcmpltu);
5558 GEN_SPEOP_COMP(evcmplts);
5559 GEN_SPEOP_COMP(evcmpeq);
5560
5561 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
5562 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
5563 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
5564 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
5565 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
5566 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
5567 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
5568 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
5569 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
5570 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
5571 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
5572 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
5573 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
5574 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
5575 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
5576 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
5577 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
5578 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
5579 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
5580 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
5581 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
5582 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
5583 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
5584 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
5585 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
5586
5587 static always_inline void gen_evsel (DisasContext *ctx)
5588 {
5589 if (unlikely(!ctx->spe_enabled)) {
5590 GEN_EXCP_NO_AP(ctx);
5591 return;
5592 }
5593 tcg_gen_mov_i32(cpu_T[0], cpu_crf[ctx->opcode & 0x7]);
5594 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));
5595 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode));
5596 gen_op_evsel();
5597 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5598 }
5599
5600 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
5601 {
5602 gen_evsel(ctx);
5603 }
5604 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
5605 {
5606 gen_evsel(ctx);
5607 }
5608 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
5609 {
5610 gen_evsel(ctx);
5611 }
5612 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
5613 {
5614 gen_evsel(ctx);
5615 }
5616
5617 /* Load and stores */
5618 #if defined(TARGET_PPC64)
5619 /* In that case, we already have 64-bit loads & stores,
5620 * so spe_ldd is equivalent to ld and spe_std is equivalent to std
5621 */
5622 #define gen_op_spe_ldd_raw gen_op_ld_raw
5623 #define gen_op_spe_ldd_user gen_op_ld_user
5624 #define gen_op_spe_ldd_kernel gen_op_ld_kernel
5625 #define gen_op_spe_ldd_hypv gen_op_ld_hypv
5626 #define gen_op_spe_ldd_64_raw gen_op_ld_64_raw
5627 #define gen_op_spe_ldd_64_user gen_op_ld_64_user
5628 #define gen_op_spe_ldd_64_kernel gen_op_ld_64_kernel
5629 #define gen_op_spe_ldd_64_hypv gen_op_ld_64_hypv
5630 #define gen_op_spe_ldd_le_raw gen_op_ld_le_raw
5631 #define gen_op_spe_ldd_le_user gen_op_ld_le_user
5632 #define gen_op_spe_ldd_le_kernel gen_op_ld_le_kernel
5633 #define gen_op_spe_ldd_le_hypv gen_op_ld_le_hypv
5634 #define gen_op_spe_ldd_le_64_raw gen_op_ld_le_64_raw
5635 #define gen_op_spe_ldd_le_64_user gen_op_ld_le_64_user
5636 #define gen_op_spe_ldd_le_64_kernel gen_op_ld_le_64_kernel
5637 #define gen_op_spe_ldd_le_64_hypv gen_op_ld_le_64_hypv
5638 #define gen_op_spe_stdd_raw gen_op_std_raw
5639 #define gen_op_spe_stdd_user gen_op_std_user
5640 #define gen_op_spe_stdd_kernel gen_op_std_kernel
5641 #define gen_op_spe_stdd_hypv gen_op_std_hypv
5642 #define gen_op_spe_stdd_64_raw gen_op_std_64_raw
5643 #define gen_op_spe_stdd_64_user gen_op_std_64_user
5644 #define gen_op_spe_stdd_64_kernel gen_op_std_64_kernel
5645 #define gen_op_spe_stdd_64_hypv gen_op_std_64_hypv
5646 #define gen_op_spe_stdd_le_raw gen_op_std_le_raw
5647 #define gen_op_spe_stdd_le_user gen_op_std_le_user
5648 #define gen_op_spe_stdd_le_kernel gen_op_std_le_kernel
5649 #define gen_op_spe_stdd_le_hypv gen_op_std_le_hypv
5650 #define gen_op_spe_stdd_le_64_raw gen_op_std_le_64_raw
5651 #define gen_op_spe_stdd_le_64_user gen_op_std_le_64_user
5652 #define gen_op_spe_stdd_le_64_kernel gen_op_std_le_64_kernel
5653 #define gen_op_spe_stdd_le_64_hypv gen_op_std_le_64_hypv
5654 #endif /* defined(TARGET_PPC64) */
5655 GEN_SPEOP_LDST(dd, 3);
5656 GEN_SPEOP_LDST(dw, 3);
5657 GEN_SPEOP_LDST(dh, 3);
5658 GEN_SPEOP_LDST(whe, 2);
5659 GEN_SPEOP_LD(whou, 2);
5660 GEN_SPEOP_LD(whos, 2);
5661 GEN_SPEOP_ST(who, 2);
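/* For the GEN_SPEOP_* macros above, the numeric argument is presumably
 * log2 of the memory access size in bytes (3 = 8 bytes for the
 * evldd/evstdd family, 2 = 4 bytes for the word forms); the SPE ISA
 * scales the 5-bit displacement of the d-form encodings by that same
 * size.  The macro definitions live earlier in this file.
 */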
5662
5663 #if defined(TARGET_PPC64)
5664 /* In that case, spe_stwwo is equivalent to stw */
5665 #define gen_op_spe_stwwo_raw gen_op_stw_raw
5666 #define gen_op_spe_stwwo_user gen_op_stw_user
5667 #define gen_op_spe_stwwo_kernel gen_op_stw_kernel
5668 #define gen_op_spe_stwwo_hypv gen_op_stw_hypv
5669 #define gen_op_spe_stwwo_le_raw gen_op_stw_le_raw
5670 #define gen_op_spe_stwwo_le_user gen_op_stw_le_user
5671 #define gen_op_spe_stwwo_le_kernel gen_op_stw_le_kernel
5672 #define gen_op_spe_stwwo_le_hypv gen_op_stw_le_hypv
5673 #define gen_op_spe_stwwo_64_raw gen_op_stw_64_raw
5674 #define gen_op_spe_stwwo_64_user gen_op_stw_64_user
5675 #define gen_op_spe_stwwo_64_kernel gen_op_stw_64_kernel
5676 #define gen_op_spe_stwwo_64_hypv gen_op_stw_64_hypv
5677 #define gen_op_spe_stwwo_le_64_raw gen_op_stw_le_64_raw
5678 #define gen_op_spe_stwwo_le_64_user gen_op_stw_le_64_user
5679 #define gen_op_spe_stwwo_le_64_kernel gen_op_stw_le_64_kernel
5680 #define gen_op_spe_stwwo_le_64_hypv gen_op_stw_le_64_hypv
5681 #endif
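/* evstwwe stores the even (upper) word of the 64-bit source register.  It
 * is derived from the odd-word store by shifting the value right by 32
 * bits (gen_op_srli32_T1_64) and then reusing the spe_stwwo micro-op.
 */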
5682 #define _GEN_OP_SPE_STWWE(suffix) \
5683 static always_inline void gen_op_spe_stwwe_##suffix (void) \
5684 { \
5685 gen_op_srli32_T1_64(); \
5686 gen_op_spe_stwwo_##suffix(); \
5687 }
5688 #define _GEN_OP_SPE_STWWE_LE(suffix) \
5689 static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
5690 { \
5691 gen_op_srli32_T1_64(); \
5692 gen_op_spe_stwwo_le_##suffix(); \
5693 }
5694 #if defined(TARGET_PPC64)
5695 #define GEN_OP_SPE_STWWE(suffix) \
5696 _GEN_OP_SPE_STWWE(suffix); \
5697 _GEN_OP_SPE_STWWE_LE(suffix); \
5698 static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
5699 { \
5700 gen_op_srli32_T1_64(); \
5701 gen_op_spe_stwwo_64_##suffix(); \
5702 } \
5703 static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
5704 { \
5705 gen_op_srli32_T1_64(); \
5706 gen_op_spe_stwwo_le_64_##suffix(); \
5707 }
5708 #else
5709 #define GEN_OP_SPE_STWWE(suffix) \
5710 _GEN_OP_SPE_STWWE(suffix); \
5711 _GEN_OP_SPE_STWWE_LE(suffix)
5712 #endif
5713 #if defined(CONFIG_USER_ONLY)
5714 GEN_OP_SPE_STWWE(raw);
5715 #else /* defined(CONFIG_USER_ONLY) */
5716 GEN_OP_SPE_STWWE(user);
5717 GEN_OP_SPE_STWWE(kernel);
5718 GEN_OP_SPE_STWWE(hypv);
5719 #endif /* defined(CONFIG_USER_ONLY) */
5720 GEN_SPEOP_ST(wwe, 2);
5721 GEN_SPEOP_ST(wwo, 2);
5722
5723 #define GEN_SPE_LDSPLAT(name, op, suffix) \
5724 static always_inline void gen_op_spe_l##name##_##suffix (void) \
5725 { \
5726 gen_op_##op##_##suffix(); \
5727 gen_op_splatw_T1_64(); \
5728 }
5729
5730 #define GEN_OP_SPE_LHE(suffix) \
5731 static always_inline void gen_op_spe_lhe_##suffix (void) \
5732 { \
5733 gen_op_spe_lh_##suffix(); \
5734 gen_op_sli16_T1_64(); \
5735 }
5736
5737 #define GEN_OP_SPE_LHX(suffix) \
5738 static always_inline void gen_op_spe_lhx_##suffix (void) \
5739 { \
5740 gen_op_spe_lh_##suffix(); \
5741 gen_op_extsh_T1_64(); \
5742 }
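/* The splat loads are built from three flavours of halfword load:
 *  - spe_lhe: the loaded halfword is shifted into the upper 16 bits of
 *    each word (gen_op_sli16_T1_64),
 *  - spe_lh:  the base halfword load (defined earlier in this file),
 *    used as-is for the unsigned splat,
 *  - spe_lhx: the halfword is sign-extended (gen_op_extsh_T1_64).
 * GEN_SPE_LDSPLAT then replicates the resulting word into both halves of
 * the 64-bit register (gen_op_splatw_T1_64), which is what evlhhesplat,
 * evlhhousplat and evlhhossplat need.
 */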
5743
5744 #if defined(CONFIG_USER_ONLY)
5745 GEN_OP_SPE_LHE(raw);
5746 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, raw);
5747 GEN_OP_SPE_LHE(le_raw);
5748 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_raw);
5749 GEN_SPE_LDSPLAT(hhousplat, spe_lh, raw);
5750 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_raw);
5751 GEN_OP_SPE_LHX(raw);
5752 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, raw);
5753 GEN_OP_SPE_LHX(le_raw);
5754 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_raw);
5755 #if defined(TARGET_PPC64)
5756 GEN_OP_SPE_LHE(64_raw);
5757 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_raw);
5758 GEN_OP_SPE_LHE(le_64_raw);
5759 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_raw);
5760 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_raw);
5761 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_raw);
5762 GEN_OP_SPE_LHX(64_raw);
5763 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_raw);
5764 GEN_OP_SPE_LHX(le_64_raw);
5765 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_raw);
5766 #endif
5767 #else
5768 GEN_OP_SPE_LHE(user);
5769 GEN_OP_SPE_LHE(kernel);
5770 GEN_OP_SPE_LHE(hypv);
5771 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, user);
5772 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, kernel);
5773 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, hypv);
5774 GEN_OP_SPE_LHE(le_user);
5775 GEN_OP_SPE_LHE(le_kernel);
5776 GEN_OP_SPE_LHE(le_hypv);
5777 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_user);
5778 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_kernel);
5779 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_hypv);
5780 GEN_SPE_LDSPLAT(hhousplat, spe_lh, user);
5781 GEN_SPE_LDSPLAT(hhousplat, spe_lh, kernel);
5782 GEN_SPE_LDSPLAT(hhousplat, spe_lh, hypv);
5783 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_user);
5784 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_kernel);
5785 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_hypv);
5786 GEN_OP_SPE_LHX(user);
5787 GEN_OP_SPE_LHX(kernel);
5788 GEN_OP_SPE_LHX(hypv);
5789 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, user);
5790 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, kernel);
5791 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, hypv);
5792 GEN_OP_SPE_LHX(le_user);
5793 GEN_OP_SPE_LHX(le_kernel);
5794 GEN_OP_SPE_LHX(le_hypv);
5795 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_user);
5796 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_kernel);
5797 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_hypv);
5798 #if defined(TARGET_PPC64)
5799 GEN_OP_SPE_LHE(64_user);
5800 GEN_OP_SPE_LHE(64_kernel);
5801 GEN_OP_SPE_LHE(64_hypv);
5802 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_user);
5803 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_kernel);
5804 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_hypv);
5805 GEN_OP_SPE_LHE(le_64_user);
5806 GEN_OP_SPE_LHE(le_64_kernel);
5807 GEN_OP_SPE_LHE(le_64_hypv);
5808 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_user);
5809 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_kernel);
5810 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_hypv);
5811 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_user);
5812 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_kernel);
5813 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_hypv);
5814 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_user);
5815 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_kernel);
5816 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_hypv);
5817 GEN_OP_SPE_LHX(64_user);
5818 GEN_OP_SPE_LHX(64_kernel);
5819 GEN_OP_SPE_LHX(64_hypv);
5820 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_user);
5821 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_kernel);
5822 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_hypv);
5823 GEN_OP_SPE_LHX(le_64_user);
5824 GEN_OP_SPE_LHX(le_64_kernel);
5825 GEN_OP_SPE_LHX(le_64_hypv);
5826 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_user);
5827 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_kernel);
5828 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_hypv);
5829 #endif
5830 #endif
5831 GEN_SPEOP_LD(hhesplat, 1);
5832 GEN_SPEOP_LD(hhousplat, 1);
5833 GEN_SPEOP_LD(hhossplat, 1);
5834 GEN_SPEOP_LD(wwsplat, 2);
5835 GEN_SPEOP_LD(whsplat, 2);
5836
5837 GEN_SPE(evlddx, evldd, 0x00, 0x0C, 0x00000000, PPC_SPE); //
5838 GEN_SPE(evldwx, evldw, 0x01, 0x0C, 0x00000000, PPC_SPE); //
5839 GEN_SPE(evldhx, evldh, 0x02, 0x0C, 0x00000000, PPC_SPE); //
5840 GEN_SPE(evlhhesplatx, evlhhesplat, 0x04, 0x0C, 0x00000000, PPC_SPE); //
5841 GEN_SPE(evlhhousplatx, evlhhousplat, 0x06, 0x0C, 0x00000000, PPC_SPE); //
5842 GEN_SPE(evlhhossplatx, evlhhossplat, 0x07, 0x0C, 0x00000000, PPC_SPE); //
5843 GEN_SPE(evlwhex, evlwhe, 0x08, 0x0C, 0x00000000, PPC_SPE); //
5844 GEN_SPE(evlwhoux, evlwhou, 0x0A, 0x0C, 0x00000000, PPC_SPE); //
5845 GEN_SPE(evlwhosx, evlwhos, 0x0B, 0x0C, 0x00000000, PPC_SPE); //
5846 GEN_SPE(evlwwsplatx, evlwwsplat, 0x0C, 0x0C, 0x00000000, PPC_SPE); //
5847 GEN_SPE(evlwhsplatx, evlwhsplat, 0x0E, 0x0C, 0x00000000, PPC_SPE); //
5848 GEN_SPE(evstddx, evstdd, 0x10, 0x0C, 0x00000000, PPC_SPE); //
5849 GEN_SPE(evstdwx, evstdw, 0x11, 0x0C, 0x00000000, PPC_SPE); //
5850 GEN_SPE(evstdhx, evstdh, 0x12, 0x0C, 0x00000000, PPC_SPE); //
5851 GEN_SPE(evstwhex, evstwhe, 0x18, 0x0C, 0x00000000, PPC_SPE); //
5852 GEN_SPE(evstwhox, evstwho, 0x1A, 0x0C, 0x00000000, PPC_SPE); //
5853 GEN_SPE(evstwwex, evstwwe, 0x1C, 0x0C, 0x00000000, PPC_SPE); //
5854 GEN_SPE(evstwwox, evstwwo, 0x1E, 0x0C, 0x00000000, PPC_SPE); //
5855
5856 /* Multiply and add - TODO */
5857 #if 0
5858 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
5859 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
5860 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
5861 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
5862 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
5863 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
5864 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
5865 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
5866 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
5867 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
5868 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
5869 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
5870
5871 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
5872 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
5873 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
5874 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
5875 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
5876 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
5877 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
5878 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
5879 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
5880 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
5881 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
5882 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
5883 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
5884 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
5885
5886 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
5887 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
5888 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
5889 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
5890 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
5891 GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
5892
5893 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
5894 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
5895 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
5896 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
5897 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
5898 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
5899 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
5900 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
5901 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
5902 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
5903 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
5904 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
5905
5906 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
5907 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
5908 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
5909 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
5910 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
5911
5912 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
5913 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
5914 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
5915 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
5916 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
5917 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
5918 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
5919 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
5920 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
5921 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
5922 GEN_SPE(evmhogumian, evmhogsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
5923 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
5924
5925 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
5926 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
5927 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
5928 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
5929 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
5930 #endif
5931
5932 /*** SPE floating-point extension ***/
5933 #define GEN_SPEFPUOP_CONV(name) \
5934 static always_inline void gen_##name (DisasContext *ctx) \
5935 { \
5936 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5937 gen_op_##name(); \
5938 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5939 }
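/* Every SPE FPU conversion has the same shape: the 64-bit source is taken
 * from rB, gen_op_<name> performs the conversion, and the result goes to
 * rD (rA is unused).  The single macro therefore covers the vector
 * (evfs*), single precision (efs*) and double precision (efd*)
 * conversion opcodes declared below.
 */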
5940
5941 /* Single precision floating-point vector operations */
5942 /* Arithmetic */
5943 GEN_SPEOP_ARITH2(evfsadd);
5944 GEN_SPEOP_ARITH2(evfssub);
5945 GEN_SPEOP_ARITH2(evfsmul);
5946 GEN_SPEOP_ARITH2(evfsdiv);
5947 GEN_SPEOP_ARITH1(evfsabs);
5948 GEN_SPEOP_ARITH1(evfsnabs);
5949 GEN_SPEOP_ARITH1(evfsneg);
5950 /* Conversion */
5951 GEN_SPEFPUOP_CONV(evfscfui);
5952 GEN_SPEFPUOP_CONV(evfscfsi);
5953 GEN_SPEFPUOP_CONV(evfscfuf);
5954 GEN_SPEFPUOP_CONV(evfscfsf);
5955 GEN_SPEFPUOP_CONV(evfsctui);
5956 GEN_SPEFPUOP_CONV(evfsctsi);
5957 GEN_SPEFPUOP_CONV(evfsctuf);
5958 GEN_SPEFPUOP_CONV(evfsctsf);
5959 GEN_SPEFPUOP_CONV(evfsctuiz);
5960 GEN_SPEFPUOP_CONV(evfsctsiz);
5961 /* Comparison */
5962 GEN_SPEOP_COMP(evfscmpgt);
5963 GEN_SPEOP_COMP(evfscmplt);
5964 GEN_SPEOP_COMP(evfscmpeq);
5965 GEN_SPEOP_COMP(evfststgt);
5966 GEN_SPEOP_COMP(evfststlt);
5967 GEN_SPEOP_COMP(evfststeq);
5968
5969 /* Opcodes definitions */
5970 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
5971 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
5972 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
5973 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
5974 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
5975 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
5976 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
5977 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
5978 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
5979 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
5980 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
5981 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
5982 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
5983 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
5984
5985 /* Single precision floating-point operations */
5986 /* Arithmetic */
5987 GEN_SPEOP_ARITH2(efsadd);
5988 GEN_SPEOP_ARITH2(efssub);
5989 GEN_SPEOP_ARITH2(efsmul);
5990 GEN_SPEOP_ARITH2(efsdiv);
5991 GEN_SPEOP_ARITH1(efsabs);
5992 GEN_SPEOP_ARITH1(efsnabs);
5993 GEN_SPEOP_ARITH1(efsneg);
5994 /* Conversion */
5995 GEN_SPEFPUOP_CONV(efscfui);
5996 GEN_SPEFPUOP_CONV(efscfsi);
5997 GEN_SPEFPUOP_CONV(efscfuf);
5998 GEN_SPEFPUOP_CONV(efscfsf);
5999 GEN_SPEFPUOP_CONV(efsctui);
6000 GEN_SPEFPUOP_CONV(efsctsi);
6001 GEN_SPEFPUOP_CONV(efsctuf);
6002 GEN_SPEFPUOP_CONV(efsctsf);
6003 GEN_SPEFPUOP_CONV(efsctuiz);
6004 GEN_SPEFPUOP_CONV(efsctsiz);
6005 GEN_SPEFPUOP_CONV(efscfd);
6006 /* Comparison */
6007 GEN_SPEOP_COMP(efscmpgt);
6008 GEN_SPEOP_COMP(efscmplt);
6009 GEN_SPEOP_COMP(efscmpeq);
6010 GEN_SPEOP_COMP(efststgt);
6011 GEN_SPEOP_COMP(efststlt);
6012 GEN_SPEOP_COMP(efststeq);
6013
6014 /* Opcodes definitions */
6015 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
6016 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
6017 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
6018 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
6019 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
6020 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
6021 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
6022 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
6023 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
6024 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
6025 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
6026 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
6027 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
6028 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
6029
6030 /* Double precision floating-point operations */
6031 /* Arithmetic */
6032 GEN_SPEOP_ARITH2(efdadd);
6033 GEN_SPEOP_ARITH2(efdsub);
6034 GEN_SPEOP_ARITH2(efdmul);
6035 GEN_SPEOP_ARITH2(efddiv);
6036 GEN_SPEOP_ARITH1(efdabs);
6037 GEN_SPEOP_ARITH1(efdnabs);
6038 GEN_SPEOP_ARITH1(efdneg);
6039 /* Conversion */
6040
6041 GEN_SPEFPUOP_CONV(efdcfui);
6042 GEN_SPEFPUOP_CONV(efdcfsi);
6043 GEN_SPEFPUOP_CONV(efdcfuf);
6044 GEN_SPEFPUOP_CONV(efdcfsf);
6045 GEN_SPEFPUOP_CONV(efdctui);
6046 GEN_SPEFPUOP_CONV(efdctsi);
6047 GEN_SPEFPUOP_CONV(efdctuf);
6048 GEN_SPEFPUOP_CONV(efdctsf);
6049 GEN_SPEFPUOP_CONV(efdctuiz);
6050 GEN_SPEFPUOP_CONV(efdctsiz);
6051 GEN_SPEFPUOP_CONV(efdcfs);
6052 GEN_SPEFPUOP_CONV(efdcfuid);
6053 GEN_SPEFPUOP_CONV(efdcfsid);
6054 GEN_SPEFPUOP_CONV(efdctuidz);
6055 GEN_SPEFPUOP_CONV(efdctsidz);
6056 /* Comparison */
6057 GEN_SPEOP_COMP(efdcmpgt);
6058 GEN_SPEOP_COMP(efdcmplt);
6059 GEN_SPEOP_COMP(efdcmpeq);
6060 GEN_SPEOP_COMP(efdtstgt);
6061 GEN_SPEOP_COMP(efdtstlt);
6062 GEN_SPEOP_COMP(efdtsteq);
6063
6064 /* Opcodes definitions */
6065 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
6066 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
6067 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
6068 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
6069 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
6070 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
6071 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
6072 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
6073 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
6074 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
6075 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
6076 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
6077 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
6078 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
6079 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
6080 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
6081
6082 /* End opcode list */
6083 GEN_OPCODE_MARK(end);
6084
6085 #include "translate_init.c"
6086 #include "helper_regs.h"
6087
6088 /*****************************************************************************/
6089 /* Misc PowerPC helpers */
6090 void cpu_dump_state (CPUState *env, FILE *f,
6091 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6092 int flags)
6093 {
6094 #define RGPL 4 /* number of GPRs per dump line */
6095 #define RFPL 4 /* number of FPRs per dump line */
6096
6097 int i;
6098
6099 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
6100 env->nip, env->lr, env->ctr, hreg_load_xer(env));
6101 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
6102 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
6103 #if !defined(NO_TIMER_DUMP)
6104 cpu_fprintf(f, "TB %08x %08x "
6105 #if !defined(CONFIG_USER_ONLY)
6106 "DECR %08x"
6107 #endif
6108 "\n",
6109 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
6110 #if !defined(CONFIG_USER_ONLY)
6111 , cpu_ppc_load_decr(env)
6112 #endif
6113 );
6114 #endif
6115 for (i = 0; i < 32; i++) {
6116 if ((i & (RGPL - 1)) == 0)
6117 cpu_fprintf(f, "GPR%02d", i);
6118 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
6119 if ((i & (RGPL - 1)) == (RGPL - 1))
6120 cpu_fprintf(f, "\n");
6121 }
6122 cpu_fprintf(f, "CR ");
6123 for (i = 0; i < 8; i++)
6124 cpu_fprintf(f, "%01x", env->crf[i]);
6125 cpu_fprintf(f, " [");
6126 for (i = 0; i < 8; i++) {
6127 char a = '-';
6128 if (env->crf[i] & 0x08)
6129 a = 'L';
6130 else if (env->crf[i] & 0x04)
6131 a = 'G';
6132 else if (env->crf[i] & 0x02)
6133 a = 'E';
6134 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
6135 }
6136 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
6137 for (i = 0; i < 32; i++) {
6138 if ((i & (RFPL - 1)) == 0)
6139 cpu_fprintf(f, "FPR%02d", i);
6140 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
6141 if ((i & (RFPL - 1)) == (RFPL - 1))
6142 cpu_fprintf(f, "\n");
6143 }
6144 #if !defined(CONFIG_USER_ONLY)
6145 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
6146 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
6147 #endif
6148
6149 #undef RGPL
6150 #undef RFPL
6151 }
6152
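/* Dump the per-opcode execution counters.  These are only maintained when
 * DO_PPC_STATISTICS is defined (handler->count is incremented in the
 * translation loop below); without that define this function prints
 * nothing.
 */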
6153 void cpu_dump_statistics (CPUState *env, FILE *f,
6154 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6155 int flags)
6156 {
6157 #if defined(DO_PPC_STATISTICS)
6158 opc_handler_t **t1, **t2, **t3, *handler;
6159 int op1, op2, op3;
6160
6161 t1 = env->opcodes;
6162 for (op1 = 0; op1 < 64; op1++) {
6163 handler = t1[op1];
6164 if (is_indirect_opcode(handler)) {
6165 t2 = ind_table(handler);
6166 for (op2 = 0; op2 < 32; op2++) {
6167 handler = t2[op2];
6168 if (is_indirect_opcode(handler)) {
6169 t3 = ind_table(handler);
6170 for (op3 = 0; op3 < 32; op3++) {
6171 handler = t3[op3];
6172 if (handler->count == 0)
6173 continue;
6174 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
6175 "%016llx %lld\n",
6176 op1, op2, op3, op1, (op3 << 5) | op2,
6177 handler->oname,
6178 handler->count, handler->count);
6179 }
6180 } else {
6181 if (handler->count == 0)
6182 continue;
6183 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
6184 "%016llx %lld\n",
6185 op1, op2, op1, op2, handler->oname,
6186 handler->count, handler->count);
6187 }
6188 }
6189 } else {
6190 if (handler->count == 0)
6191 continue;
6192 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
6193 op1, op1, handler->oname,
6194 handler->count, handler->count);
6195 }
6196 }
6197 #endif
6198 }
6199
6200 /*****************************************************************************/
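/* Core translation loop.  For each guest instruction the code below:
 *   1. fetches the 32-bit opcode at ctx.nip (byte-swapped when MSR[LE]
 *      is set),
 *   2. resolves the handler through up to three table levels, roughly:
 *        handler = env->opcodes[opc1(insn)];
 *        if (is_indirect_opcode(handler))
 *            handler = ind_table(handler)[opc2(insn)];   // then opc3
 *   3. rejects opcodes whose reserved bits (handler->inval) are set,
 *   4. calls the handler to emit the ops,
 * and stops at exceptions, page boundaries, single-step requests or the
 * instruction-count limit.  (Summary comment only; see the code below
 * for the details.)
 */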
6201 static always_inline void gen_intermediate_code_internal (CPUState *env,
6202 TranslationBlock *tb,
6203 int search_pc)
6204 {
6205 DisasContext ctx, *ctxp = &ctx;
6206 opc_handler_t **table, *handler;
6207 target_ulong pc_start;
6208 uint16_t *gen_opc_end;
6209 int supervisor, little_endian;
6210 int j, lj = -1;
6211 int num_insns;
6212 int max_insns;
6213
6214 pc_start = tb->pc;
6215 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6216 #if defined(OPTIMIZE_FPRF_UPDATE)
6217 gen_fprf_ptr = gen_fprf_buf;
6218 #endif
6219 ctx.nip = pc_start;
6220 ctx.tb = tb;
6221 ctx.exception = POWERPC_EXCP_NONE;
6222 ctx.spr_cb = env->spr_cb;
6223 supervisor = env->mmu_idx;
6224 #if !defined(CONFIG_USER_ONLY)
6225 ctx.supervisor = supervisor;
6226 #endif
6227 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
6228 #if defined(TARGET_PPC64)
6229 ctx.sf_mode = msr_sf;
6230 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
6231 #else
6232 ctx.mem_idx = (supervisor << 1) | little_endian;
6233 #endif
6234 ctx.dcache_line_size = env->dcache_line_size;
6235 ctx.fpu_enabled = msr_fp;
6236 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
6237 ctx.spe_enabled = msr_spe;
6238 else
6239 ctx.spe_enabled = 0;
6240 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
6241 ctx.altivec_enabled = msr_vr;
6242 else
6243 ctx.altivec_enabled = 0;
6244 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
6245 ctx.singlestep_enabled = CPU_SINGLE_STEP;
6246 else
6247 ctx.singlestep_enabled = 0;
6248 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
6249 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
6250 if (unlikely(env->singlestep_enabled))
6251 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
6252 #if defined (DO_SINGLE_STEP) && 0
6253 /* Single step trace mode */
6254 msr_se = 1;
6255 #endif
6256 num_insns = 0;
6257 max_insns = tb->cflags & CF_COUNT_MASK;
6258 if (max_insns == 0)
6259 max_insns = CF_COUNT_MASK;
6260
6261 gen_icount_start();
6262 /* Set env in case of segfault during code fetch */
6263 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
6264 if (unlikely(env->nb_breakpoints > 0)) {
6265 for (j = 0; j < env->nb_breakpoints; j++) {
6266 if (env->breakpoints[j] == ctx.nip) {
6267 gen_update_nip(&ctx, ctx.nip);
6268 gen_op_debug();
6269 break;
6270 }
6271 }
6272 }
6273 if (unlikely(search_pc)) {
6274 j = gen_opc_ptr - gen_opc_buf;
6275 if (lj < j) {
6276 lj++;
6277 while (lj < j)
6278 gen_opc_instr_start[lj++] = 0;
6279 gen_opc_pc[lj] = ctx.nip;
6280 gen_opc_instr_start[lj] = 1;
6281 gen_opc_icount[lj] = num_insns;
6282 }
6283 }
6284 #if defined PPC_DEBUG_DISAS
6285 if (loglevel & CPU_LOG_TB_IN_ASM) {
6286 fprintf(logfile, "----------------\n");
6287 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
6288 ctx.nip, supervisor, (int)msr_ir);
6289 }
6290 #endif
6291 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
6292 gen_io_start();
6293 if (unlikely(little_endian)) {
6294 ctx.opcode = bswap32(ldl_code(ctx.nip));
6295 } else {
6296 ctx.opcode = ldl_code(ctx.nip);
6297 }
6298 #if defined PPC_DEBUG_DISAS
6299 if (loglevel & CPU_LOG_TB_IN_ASM) {
6300 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
6301 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
6302 opc3(ctx.opcode), little_endian ? "little" : "big");
6303 }
6304 #endif
6305 ctx.nip += 4;
6306 table = env->opcodes;
6307 num_insns++;
6308 handler = table[opc1(ctx.opcode)];
6309 if (is_indirect_opcode(handler)) {
6310 table = ind_table(handler);
6311 handler = table[opc2(ctx.opcode)];
6312 if (is_indirect_opcode(handler)) {
6313 table = ind_table(handler);
6314 handler = table[opc3(ctx.opcode)];
6315 }
6316 }
6317 /* Is opcode *REALLY* valid? */
6318 if (unlikely(handler->handler == &gen_invalid)) {
6319 if (loglevel != 0) {
6320 fprintf(logfile, "invalid/unsupported opcode: "
6321 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6322 opc1(ctx.opcode), opc2(ctx.opcode),
6323 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6324 } else {
6325 printf("invalid/unsupported opcode: "
6326 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6327 opc1(ctx.opcode), opc2(ctx.opcode),
6328 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6329 }
6330 } else {
6331 if (unlikely((ctx.opcode & handler->inval) != 0)) {
6332 if (loglevel != 0) {
6333 fprintf(logfile, "invalid bits: %08x for opcode: "
6334 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6335 ctx.opcode & handler->inval, opc1(ctx.opcode),
6336 opc2(ctx.opcode), opc3(ctx.opcode),
6337 ctx.opcode, ctx.nip - 4);
6338 } else {
6339 printf("invalid bits: %08x for opcode: "
6340 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6341 ctx.opcode & handler->inval, opc1(ctx.opcode),
6342 opc2(ctx.opcode), opc3(ctx.opcode),
6343 ctx.opcode, ctx.nip - 4);
6344 }
6345 GEN_EXCP_INVAL(ctxp);
6346 break;
6347 }
6348 }
6349 (*(handler->handler))(&ctx);
6350 #if defined(DO_PPC_STATISTICS)
6351 handler->count++;
6352 #endif
6353 /* Check trace mode exceptions */
6354 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
6355 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
6356 ctx.exception != POWERPC_SYSCALL &&
6357 ctx.exception != POWERPC_EXCP_TRAP &&
6358 ctx.exception != POWERPC_EXCP_BRANCH)) {
6359 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
6360 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
6361 (env->singlestep_enabled) ||
6362 num_insns >= max_insns)) {
6363 /* if we reach a page boundary, are single stepping or have emitted
6364  * the maximum number of instructions, stop generation
6365 */
6366 break;
6367 }
6368 #if defined (DO_SINGLE_STEP)
6369 break;
6370 #endif
6371 }
6372 if (tb->cflags & CF_LAST_IO)
6373 gen_io_end();
6374 if (ctx.exception == POWERPC_EXCP_NONE) {
6375 gen_goto_tb(&ctx, 0, ctx.nip);
6376 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
6377 if (unlikely(env->singlestep_enabled)) {
6378 gen_update_nip(&ctx, ctx.nip);
6379 gen_op_debug();
6380 }
6381 /* Generate the return instruction */
6382 tcg_gen_exit_tb(0);
6383 }
6384 gen_icount_end(tb, num_insns);
6385 *gen_opc_ptr = INDEX_op_end;
6386 if (unlikely(search_pc)) {
6387 j = gen_opc_ptr - gen_opc_buf;
6388 lj++;
6389 while (lj <= j)
6390 gen_opc_instr_start[lj++] = 0;
6391 } else {
6392 tb->size = ctx.nip - pc_start;
6393 tb->icount = num_insns;
6394 }
6395 #if defined(DEBUG_DISAS)
6396 if (loglevel & CPU_LOG_TB_CPU) {
6397 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
6398 cpu_dump_state(env, logfile, fprintf, 0);
6399 }
6400 if (loglevel & CPU_LOG_TB_IN_ASM) {
6401 int flags;
6402 flags = env->bfd_mach;
6403 flags |= little_endian << 16;
6404 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6405 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
6406 fprintf(logfile, "\n");
6407 }
6408 #endif
6409 }
6410
6411 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
6412 {
6413 gen_intermediate_code_internal(env, tb, 0);
6414 }
6415
6416 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
6417 {
6418 gen_intermediate_code_internal(env, tb, 1);
6419 }
6420
6421 void gen_pc_load(CPUState *env, TranslationBlock *tb,
6422 unsigned long searched_pc, int pc_pos, void *puc)
6423 {
6424 int type, c;
6425 /* for PPC, we need to look at the micro operation to get the
6426 * access type */
6427 env->nip = gen_opc_pc[pc_pos];
6428 c = gen_opc_buf[pc_pos];
6429 switch(c) {
6430 #if defined(CONFIG_USER_ONLY)
6431 #define CASE3(op)\
6432 case INDEX_op_ ## op ## _raw
6433 #else
6434 #define CASE3(op)\
6435 case INDEX_op_ ## op ## _user:\
6436 case INDEX_op_ ## op ## _kernel:\
6437 case INDEX_op_ ## op ## _hypv
6438 #endif
6439
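/* CASE3 expands to one case label per MMU access mode: only the _raw
 * variant exists in user-mode-only builds, otherwise the _user, _kernel
 * and _hypv variants are covered.
 */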
6440 CASE3(stfd):
6441 CASE3(stfs):
6442 CASE3(lfd):
6443 CASE3(lfs):
6444 type = ACCESS_FLOAT;
6445 break;
6446 CASE3(lwarx):
6447 type = ACCESS_RES;
6448 break;
6449 CASE3(stwcx):
6450 type = ACCESS_RES;
6451 break;
6452 CASE3(eciwx):
6453 CASE3(ecowx):
6454 type = ACCESS_EXT;
6455 break;
6456 default:
6457 type = ACCESS_INT;
6458 break;
6459 }
6460 env->access_type = type;
6461 }