[qemu.git] / target-ppc / translate.c  (commit: target-ppc: convert dcr load/store to TCG)
1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "tcg-op.h"
30 #include "qemu-common.h"
31
32 #include "helper.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
35
36 #define CPU_SINGLE_STEP 0x1
37 #define CPU_BRANCH_STEP 0x2
38 #define GDBSTUB_SINGLE_STEP 0x4
39
40 /* Include definitions for instruction classes and implementation flags */
41 //#define DO_SINGLE_STEP
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
44 //#define OPTIMIZE_FPRF_UPDATE
45
46 /*****************************************************************************/
47 /* Code translation helpers */
48
49 /* global register indexes */
50 static TCGv_ptr cpu_env;
51 static char cpu_reg_names[10*3 + 22*4 /* GPR */
52 #if !defined(TARGET_PPC64)
53 + 10*4 + 22*5 /* SPE GPRh */
54 #endif
55 + 10*4 + 22*5 /* FPR */
56 + 2*(10*6 + 22*7) /* AVRh, AVRl */
57 + 8*5 /* CRF */];
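/* The buffer above is sized for the NUL-terminated names generated in
 * ppc_translate_init() below: "r0".."r9" take 3 bytes and "r10".."r31" take
 * 4, and likewise for the longer "rNH" (SPE high word), "fpN",
 * "avrNH"/"avrNL" and "crfN" spellings, which is exactly what the arithmetic
 * in the declaration accounts for. */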
58 static TCGv cpu_gpr[32];
59 #if !defined(TARGET_PPC64)
60 static TCGv cpu_gprh[32];
61 #endif
62 static TCGv_i64 cpu_fpr[32];
63 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
64 static TCGv_i32 cpu_crf[8];
65 static TCGv cpu_nip;
66 static TCGv cpu_msr;
67 static TCGv cpu_ctr;
68 static TCGv cpu_lr;
69 static TCGv cpu_xer;
70 static TCGv cpu_reserve;
71 static TCGv_i32 cpu_fpscr;
72 static TCGv_i32 cpu_access_type;
73
74 /* dyngen register indexes */
75 static TCGv cpu_T[3];
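/* cpu_T[] are leftovers from the old dyngen code generator; instructions
 * that have not yet been converted to direct TCG operations still go through
 * these fixed temporaries, which is why they stay mapped to host registers
 * (or to env slots) in ppc_translate_init() below. */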
76
77 #include "gen-icount.h"
78
79 void ppc_translate_init(void)
80 {
81 int i;
82 char* p;
83 static int done_init = 0;
84
85 if (done_init)
86 return;
87
88 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
89 #if TARGET_LONG_BITS > HOST_LONG_BITS
90 cpu_T[0] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t0), "T0");
91 cpu_T[1] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t1), "T1");
92 cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
93 #else
94 cpu_T[0] = tcg_global_reg_new(TCG_AREG1, "T0");
95 cpu_T[1] = tcg_global_reg_new(TCG_AREG2, "T1");
96 #ifdef HOST_I386
97 /* XXX: This is a temporary workaround for i386.
98 * On i386 qemu_st32 runs out of registers.
99 * The proper fix is to remove cpu_T.
100 */
101 cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
102 #else
103 cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2");
104 #endif
105 #endif
106
107 p = cpu_reg_names;
108
109 for (i = 0; i < 8; i++) {
110 sprintf(p, "crf%d", i);
111 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
112 offsetof(CPUState, crf[i]), p);
113 p += 5;
114 }
115
116 for (i = 0; i < 32; i++) {
117 sprintf(p, "r%d", i);
118 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
119 offsetof(CPUState, gpr[i]), p);
120 p += (i < 10) ? 3 : 4;
121 #if !defined(TARGET_PPC64)
122 sprintf(p, "r%dH", i);
123 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
124 offsetof(CPUState, gprh[i]), p);
125 p += (i < 10) ? 4 : 5;
126 #endif
127
128 sprintf(p, "fp%d", i);
129 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
130 offsetof(CPUState, fpr[i]), p);
131 p += (i < 10) ? 4 : 5;
132
133 sprintf(p, "avr%dH", i);
134 #ifdef WORDS_BIGENDIAN
135 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
136 offsetof(CPUState, avr[i].u64[0]), p);
137 #else
138 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
139 offsetof(CPUState, avr[i].u64[1]), p);
140 #endif
141 p += (i < 10) ? 6 : 7;
142
143 sprintf(p, "avr%dL", i);
144 #ifdef WORDS_BIGENDIAN
145 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
146 offsetof(CPUState, avr[i].u64[1]), p);
147 #else
148 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
149 offsetof(CPUState, avr[i].u64[0]), p);
150 #endif
151 p += (i < 10) ? 6 : 7;
152 }
153
154 cpu_nip = tcg_global_mem_new(TCG_AREG0,
155 offsetof(CPUState, nip), "nip");
156
157 cpu_msr = tcg_global_mem_new(TCG_AREG0,
158 offsetof(CPUState, msr), "msr");
159
160 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
161 offsetof(CPUState, ctr), "ctr");
162
163 cpu_lr = tcg_global_mem_new(TCG_AREG0,
164 offsetof(CPUState, lr), "lr");
165
166 cpu_xer = tcg_global_mem_new(TCG_AREG0,
167 offsetof(CPUState, xer), "xer");
168
169 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
170 offsetof(CPUState, reserve), "reserve");
171
172 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
173 offsetof(CPUState, fpscr), "fpscr");
174
175 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
176 offsetof(CPUState, access_type), "access_type");
177
178 /* register helpers */
179 #define GEN_HELPER 2
180 #include "helper.h"
181
182 done_init = 1;
183 }
184
185 #if defined(OPTIMIZE_FPRF_UPDATE)
186 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
187 static uint16_t **gen_fprf_ptr;
188 #endif
189
190 /* internal defines */
191 typedef struct DisasContext {
192 struct TranslationBlock *tb;
193 target_ulong nip;
194 uint32_t opcode;
195 uint32_t exception;
196 /* Routine used to access memory */
197 int mem_idx;
198 /* Translation flags */
199 #if !defined(CONFIG_USER_ONLY)
200 int supervisor;
201 #endif
202 #if defined(TARGET_PPC64)
203 int sf_mode;
204 #endif
205 int fpu_enabled;
206 int altivec_enabled;
207 int spe_enabled;
208 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
209 int singlestep_enabled;
210 } DisasContext;
211
212 struct opc_handler_t {
213 /* invalid bits */
214 uint32_t inval;
215 /* instruction type */
216 uint64_t type;
217 /* handler */
218 void (*handler)(DisasContext *ctx);
219 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
220 const char *oname;
221 #endif
222 #if defined(DO_PPC_STATISTICS)
223 uint64_t count;
224 #endif
225 };
226
227 static always_inline void gen_reset_fpstatus (void)
228 {
229 #ifdef CONFIG_SOFTFLOAT
230 gen_op_reset_fpstatus();
231 #endif
232 }
233
234 static always_inline void gen_compute_fprf (TCGv_i64 arg, int set_fprf, int set_rc)
235 {
236 TCGv_i32 t0 = tcg_temp_new_i32();
237
238 if (set_fprf != 0) {
239 /* This case might be optimized later */
240 #if defined(OPTIMIZE_FPRF_UPDATE)
241 *gen_fprf_ptr++ = gen_opc_ptr;
242 #endif
243 tcg_gen_movi_i32(t0, 1);
244 gen_helper_compute_fprf(t0, arg, t0);
245 if (unlikely(set_rc)) {
246 tcg_gen_mov_i32(cpu_crf[1], t0);
247 }
248 gen_helper_float_check_status();
249 } else if (unlikely(set_rc)) {
250 /* We always need to compute fpcc */
251 tcg_gen_movi_i32(t0, 0);
252 gen_helper_compute_fprf(t0, arg, t0);
253 tcg_gen_mov_i32(cpu_crf[1], t0);
254 if (set_fprf)
255 gen_helper_float_check_status();
256 }
257
258 tcg_temp_free_i32(t0);
259 }
260
261 static always_inline void gen_optimize_fprf (void)
262 {
263 #if defined(OPTIMIZE_FPRF_UPDATE)
264 uint16_t **ptr;
265
266 for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
267 *ptr = INDEX_op_nop1;
268 gen_fprf_ptr = gen_fprf_buf;
269 #endif
270 }
271
272 static always_inline void gen_set_access_type(int access_type)
273 {
274 tcg_gen_movi_i32(cpu_access_type, access_type);
275 }
276
277 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
278 {
279 #if defined(TARGET_PPC64)
280 if (ctx->sf_mode)
281 tcg_gen_movi_tl(cpu_nip, nip);
282 else
283 #endif
284 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
285 }
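/* Helpers that may raise an exception need env->nip to point at the faulting
 * instruction, so gen_update_nip() is called before such helpers and before
 * stopping translation; in 32-bit mode the address is truncated to 32 bits. */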
286
287 #define GEN_EXCP(ctx, excp, error) \
288 do { \
289 TCGv_i32 t0 = tcg_const_i32(excp); \
290 TCGv_i32 t1 = tcg_const_i32(error); \
291 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
292 gen_update_nip(ctx, (ctx)->nip); \
293 } \
294 gen_helper_raise_exception_err(t0, t1); \
295 tcg_temp_free_i32(t0); \
296 tcg_temp_free_i32(t1); \
297 ctx->exception = (excp); \
298 } while (0)
299
300 #define GEN_EXCP_INVAL(ctx) \
301 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
302 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
303
304 #define GEN_EXCP_PRIVOPC(ctx) \
305 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
306 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
307
308 #define GEN_EXCP_PRIVREG(ctx) \
309 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
310 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
311
312 #define GEN_EXCP_NO_FP(ctx) \
313 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
314
315 #define GEN_EXCP_NO_AP(ctx) \
316 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
317
318 #define GEN_EXCP_NO_VR(ctx) \
319 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
320
321 /* Stop translation */
322 static always_inline void GEN_STOP (DisasContext *ctx)
323 {
324 gen_update_nip(ctx, ctx->nip);
325 ctx->exception = POWERPC_EXCP_STOP;
326 }
327
328 /* No need to update nip here, as execution flow will change */
329 static always_inline void GEN_SYNC (DisasContext *ctx)
330 {
331 ctx->exception = POWERPC_EXCP_SYNC;
332 }
333
334 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
335 static void gen_##name (DisasContext *ctx); \
336 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
337 static void gen_##name (DisasContext *ctx)
338
339 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
340 static void gen_##name (DisasContext *ctx); \
341 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
342 static void gen_##name (DisasContext *ctx)
343
344 typedef struct opcode_t {
345 unsigned char opc1, opc2, opc3;
346 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
347 unsigned char pad[5];
348 #else
349 unsigned char pad[1];
350 #endif
351 opc_handler_t handler;
352 const char *oname;
353 } opcode_t;
354
355 /*****************************************************************************/
356 /*** Instruction decoding ***/
357 #define EXTRACT_HELPER(name, shift, nb) \
358 static always_inline uint32_t name (uint32_t opcode) \
359 { \
360 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
361 }
362
363 #define EXTRACT_SHELPER(name, shift, nb) \
364 static always_inline int32_t name (uint32_t opcode) \
365 { \
366 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
367 }
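/* For example, EXTRACT_HELPER(rA, 16, 5) below expands to a function that
 * returns (opcode >> 16) & 0x1F, i.e. the 5-bit rA field.  EXTRACT_SHELPER
 * additionally sign-extends the extracted value through int16_t, which is
 * only meaningful for the 16-bit immediate fields. */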
368
369 /* Opcode part 1 */
370 EXTRACT_HELPER(opc1, 26, 6);
371 /* Opcode part 2 */
372 EXTRACT_HELPER(opc2, 1, 5);
373 /* Opcode part 3 */
374 EXTRACT_HELPER(opc3, 6, 5);
375 /* Update Cr0 flags */
376 EXTRACT_HELPER(Rc, 0, 1);
377 /* Destination */
378 EXTRACT_HELPER(rD, 21, 5);
379 /* Source */
380 EXTRACT_HELPER(rS, 21, 5);
381 /* First operand */
382 EXTRACT_HELPER(rA, 16, 5);
383 /* Second operand */
384 EXTRACT_HELPER(rB, 11, 5);
385 /* Third operand */
386 EXTRACT_HELPER(rC, 6, 5);
387 /*** Get CRn ***/
388 EXTRACT_HELPER(crfD, 23, 3);
389 EXTRACT_HELPER(crfS, 18, 3);
390 EXTRACT_HELPER(crbD, 21, 5);
391 EXTRACT_HELPER(crbA, 16, 5);
392 EXTRACT_HELPER(crbB, 11, 5);
393 /* SPR / TBL */
394 EXTRACT_HELPER(_SPR, 11, 10);
395 static always_inline uint32_t SPR (uint32_t opcode)
396 {
397 uint32_t sprn = _SPR(opcode);
398
399 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
400 }
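/* The SPR number is encoded in mfspr/mtspr with its two 5-bit halves
 * swapped, so SPR() swaps them back to recover the architected SPR number. */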
401 /*** Get constants ***/
402 EXTRACT_HELPER(IMM, 12, 8);
403 /* 16 bits signed immediate value */
404 EXTRACT_SHELPER(SIMM, 0, 16);
405 /* 16 bits unsigned immediate value */
406 EXTRACT_HELPER(UIMM, 0, 16);
407 /* Bit count */
408 EXTRACT_HELPER(NB, 11, 5);
409 /* Shift count */
410 EXTRACT_HELPER(SH, 11, 5);
411 /* Mask start */
412 EXTRACT_HELPER(MB, 6, 5);
413 /* Mask end */
414 EXTRACT_HELPER(ME, 1, 5);
415 /* Trap operand */
416 EXTRACT_HELPER(TO, 21, 5);
417
418 EXTRACT_HELPER(CRM, 12, 8);
419 EXTRACT_HELPER(FM, 17, 8);
420 EXTRACT_HELPER(SR, 16, 4);
421 EXTRACT_HELPER(FPIMM, 12, 4);
422
423 /*** Jump target decoding ***/
424 /* Displacement */
425 EXTRACT_SHELPER(d, 0, 16);
426 /* Immediate address */
427 static always_inline target_ulong LI (uint32_t opcode)
428 {
429 return (opcode >> 0) & 0x03FFFFFC;
430 }
431
432 static always_inline uint32_t BD (uint32_t opcode)
433 {
434 return (opcode >> 0) & 0xFFFC;
435 }
436
437 EXTRACT_HELPER(BO, 21, 5);
438 EXTRACT_HELPER(BI, 16, 5);
439 /* Absolute/relative address */
440 EXTRACT_HELPER(AA, 1, 1);
441 /* Link */
442 EXTRACT_HELPER(LK, 0, 1);
443
444 /* Create a mask between <start> and <end> bits */
445 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
446 {
447 target_ulong ret;
448
449 #if defined(TARGET_PPC64)
450 if (likely(start == 0)) {
451 ret = UINT64_MAX << (63 - end);
452 } else if (likely(end == 63)) {
453 ret = UINT64_MAX >> start;
454 }
455 #else
456 if (likely(start == 0)) {
457 ret = UINT32_MAX << (31 - end);
458 } else if (likely(end == 31)) {
459 ret = UINT32_MAX >> start;
460 }
461 #endif
462 else {
463 ret = (((target_ulong)(-1ULL)) >> (start)) ^
464 (((target_ulong)(-1ULL) >> (end)) >> 1);
465 if (unlikely(start > end))
466 return ~ret;
467 }
468
469 return ret;
470 }
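/* MASK() uses the PowerPC convention where bit 0 is the most significant
 * bit: e.g. on a 32-bit target MASK(0, 0) is 0x80000000 and MASK(25, 31) is
 * 0x0000007F, while start > end produces the wrapped (inverted) mask used by
 * the rotate-and-mask instructions. */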
471
472 /*****************************************************************************/
473 /* PowerPC instruction type definitions */
474 enum {
475 PPC_NONE = 0x0000000000000000ULL,
476 /* PowerPC base instruction set */
477 PPC_INSNS_BASE = 0x0000000000000001ULL,
478 /* integer operations instructions */
479 #define PPC_INTEGER PPC_INSNS_BASE
480 /* flow control instructions */
481 #define PPC_FLOW PPC_INSNS_BASE
482 /* virtual memory instructions */
483 #define PPC_MEM PPC_INSNS_BASE
484 /* ld/st with reservation instructions */
485 #define PPC_RES PPC_INSNS_BASE
486 /* spr/msr access instructions */
487 #define PPC_MISC PPC_INSNS_BASE
488 /* Deprecated instruction sets */
489 /* Original POWER instruction set */
490 PPC_POWER = 0x0000000000000002ULL,
491 /* POWER2 instruction set extension */
492 PPC_POWER2 = 0x0000000000000004ULL,
493 /* Power RTC support */
494 PPC_POWER_RTC = 0x0000000000000008ULL,
495 /* Power-to-PowerPC bridge (601) */
496 PPC_POWER_BR = 0x0000000000000010ULL,
497 /* 64-bit PowerPC instruction set */
498 PPC_64B = 0x0000000000000020ULL,
499 /* New 64-bit extensions (PowerPC 2.0x) */
500 PPC_64BX = 0x0000000000000040ULL,
501 /* 64-bit hypervisor extensions */
502 PPC_64H = 0x0000000000000080ULL,
503 /* New wait instruction (PowerPC 2.0x) */
504 PPC_WAIT = 0x0000000000000100ULL,
505 /* Time base mftb instruction */
506 PPC_MFTB = 0x0000000000000200ULL,
507
508 /* Fixed-point unit extensions */
509 /* PowerPC 602 specific */
510 PPC_602_SPEC = 0x0000000000000400ULL,
511 /* isel instruction */
512 PPC_ISEL = 0x0000000000000800ULL,
513 /* popcntb instruction */
514 PPC_POPCNTB = 0x0000000000001000ULL,
515 /* string load / store */
516 PPC_STRING = 0x0000000000002000ULL,
517
518 /* Floating-point unit extensions */
519 /* Optional floating point instructions */
520 PPC_FLOAT = 0x0000000000010000ULL,
521 /* New floating-point extensions (PowerPC 2.0x) */
522 PPC_FLOAT_EXT = 0x0000000000020000ULL,
523 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
524 PPC_FLOAT_FRES = 0x0000000000080000ULL,
525 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
526 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
527 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
528 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
529
530 /* Vector/SIMD extensions */
531 /* Altivec support */
532 PPC_ALTIVEC = 0x0000000001000000ULL,
533 /* PowerPC 2.03 SPE extension */
534 PPC_SPE = 0x0000000002000000ULL,
535 /* PowerPC 2.03 SPE floating-point extension */
536 PPC_SPEFPU = 0x0000000004000000ULL,
537
538 /* Optional memory control instructions */
539 PPC_MEM_TLBIA = 0x0000000010000000ULL,
540 PPC_MEM_TLBIE = 0x0000000020000000ULL,
541 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
542 /* sync instruction */
543 PPC_MEM_SYNC = 0x0000000080000000ULL,
544 /* eieio instruction */
545 PPC_MEM_EIEIO = 0x0000000100000000ULL,
546
547 /* Cache control instructions */
548 PPC_CACHE = 0x0000000200000000ULL,
549 /* icbi instruction */
550 PPC_CACHE_ICBI = 0x0000000400000000ULL,
551 /* dcbz instruction with fixed cache line size */
552 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
553 /* dcbz instruction with tunable cache line size */
554 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
555 /* dcba instruction */
556 PPC_CACHE_DCBA = 0x0000002000000000ULL,
557 /* Freescale cache locking instructions */
558 PPC_CACHE_LOCK = 0x0000004000000000ULL,
559
560 /* MMU related extensions */
561 /* external control instructions */
562 PPC_EXTERN = 0x0000010000000000ULL,
563 /* segment register access instructions */
564 PPC_SEGMENT = 0x0000020000000000ULL,
565 /* PowerPC 6xx TLB management instructions */
566 PPC_6xx_TLB = 0x0000040000000000ULL,
567 /* PowerPC 74xx TLB management instructions */
568 PPC_74xx_TLB = 0x0000080000000000ULL,
569 /* PowerPC 40x TLB management instructions */
570 PPC_40x_TLB = 0x0000100000000000ULL,
571 /* segment register access instructions for PowerPC 64 "bridge" */
572 PPC_SEGMENT_64B = 0x0000200000000000ULL,
573 /* SLB management */
574 PPC_SLBI = 0x0000400000000000ULL,
575
576 /* Embedded PowerPC dedicated instructions */
577 PPC_WRTEE = 0x0001000000000000ULL,
578 /* PowerPC 40x exception model */
579 PPC_40x_EXCP = 0x0002000000000000ULL,
580 /* PowerPC 405 MAC (multiply-accumulate) instructions */
581 PPC_405_MAC = 0x0004000000000000ULL,
582 /* PowerPC 440 specific instructions */
583 PPC_440_SPEC = 0x0008000000000000ULL,
584 /* BookE (embedded) PowerPC specification */
585 PPC_BOOKE = 0x0010000000000000ULL,
586 /* mfapidi instruction */
587 PPC_MFAPIDI = 0x0020000000000000ULL,
588 /* tlbiva instruction */
589 PPC_TLBIVA = 0x0040000000000000ULL,
590 /* tlbivax instruction */
591 PPC_TLBIVAX = 0x0080000000000000ULL,
592 /* PowerPC 4xx dedicated instructions */
593 PPC_4xx_COMMON = 0x0100000000000000ULL,
594 /* PowerPC 40x icbt instructions */
595 PPC_40x_ICBT = 0x0200000000000000ULL,
596 /* rfmci is not implemented in all BookE PowerPC */
597 PPC_RFMCI = 0x0400000000000000ULL,
598 /* rfdi instruction */
599 PPC_RFDI = 0x0800000000000000ULL,
600 /* DCR accesses */
601 PPC_DCR = 0x1000000000000000ULL,
602 /* DCR extended accesses */
603 PPC_DCRX = 0x2000000000000000ULL,
604 /* user-mode DCR access, implemented in PowerPC 460 */
605 PPC_DCRUX = 0x4000000000000000ULL,
606 };
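/* Each opcode handler carries one or more of the flags above in its 'type'
 * field; at CPU creation only the handlers whose flags appear in the CPU
 * model's instruction-set mask (insns_flags) are registered, so the same
 * decoder serves every supported PowerPC variant. */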
607
608 /*****************************************************************************/
609 /* PowerPC instructions table */
610 #if HOST_LONG_BITS == 64
611 #define OPC_ALIGN 8
612 #else
613 #define OPC_ALIGN 4
614 #endif
615 #if defined(__APPLE__)
616 #define OPCODES_SECTION \
617 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
618 #else
619 #define OPCODES_SECTION \
620 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
621 #endif
622
623 #if defined(DO_PPC_STATISTICS)
624 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
625 OPCODES_SECTION opcode_t opc_##name = { \
626 .opc1 = op1, \
627 .opc2 = op2, \
628 .opc3 = op3, \
629 .pad = { 0, }, \
630 .handler = { \
631 .inval = invl, \
632 .type = _typ, \
633 .handler = &gen_##name, \
634 .oname = stringify(name), \
635 }, \
636 .oname = stringify(name), \
637 }
638 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
639 OPCODES_SECTION opcode_t opc_##name = { \
640 .opc1 = op1, \
641 .opc2 = op2, \
642 .opc3 = op3, \
643 .pad = { 0, }, \
644 .handler = { \
645 .inval = invl, \
646 .type = _typ, \
647 .handler = &gen_##name, \
648 .oname = onam, \
649 }, \
650 .oname = onam, \
651 }
652 #else
653 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
654 OPCODES_SECTION opcode_t opc_##name = { \
655 .opc1 = op1, \
656 .opc2 = op2, \
657 .opc3 = op3, \
658 .pad = { 0, }, \
659 .handler = { \
660 .inval = invl, \
661 .type = _typ, \
662 .handler = &gen_##name, \
663 }, \
664 .oname = stringify(name), \
665 }
666 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
667 OPCODES_SECTION opcode_t opc_##name = { \
668 .opc1 = op1, \
669 .opc2 = op2, \
670 .opc3 = op3, \
671 .pad = { 0, }, \
672 .handler = { \
673 .inval = invl, \
674 .type = _typ, \
675 .handler = &gen_##name, \
676 }, \
677 .oname = onam, \
678 }
679 #endif
680
681 #define GEN_OPCODE_MARK(name) \
682 OPCODES_SECTION opcode_t opc_##name = { \
683 .opc1 = 0xFF, \
684 .opc2 = 0xFF, \
685 .opc3 = 0xFF, \
686 .pad = { 0, }, \
687 .handler = { \
688 .inval = 0x00000000, \
689 .type = 0x00, \
690 .handler = NULL, \
691 }, \
692 .oname = stringify(name), \
693 }
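/* The opcode_t descriptors are never referenced directly: OPCODES_SECTION
 * places them in a dedicated linker section, and the CPU initialisation code
 * walks that section, delimited by GEN_OPCODE_MARK(start) below and a
 * matching end marker at the end of the file, to fill the three-level
 * (opc1/opc2/opc3) dispatch tables used by the translator. */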
694
695 /* SPR load/store helpers */
696 static always_inline void gen_load_spr(TCGv t, int reg)
697 {
698 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
699 }
700
701 static always_inline void gen_store_spr(int reg, TCGv t)
702 {
703 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
704 }
705
706 /* Start opcode list */
707 GEN_OPCODE_MARK(start);
708
709 /* Invalid instruction */
710 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
711 {
712 GEN_EXCP_INVAL(ctx);
713 }
714
715 static opc_handler_t invalid_handler = {
716 .inval = 0xFFFFFFFF,
717 .type = PPC_NONE,
718 .handler = gen_invalid,
719 };
720
721 /*** Integer comparison ***/
722
723 static always_inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
724 {
725 int l1, l2, l3;
726
727 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
728 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
729 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
730
731 l1 = gen_new_label();
732 l2 = gen_new_label();
733 l3 = gen_new_label();
734 if (s) {
735 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
736 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
737 } else {
738 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
739 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
740 }
741 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
742 tcg_gen_br(l3);
743 gen_set_label(l1);
744 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
745 tcg_gen_br(l3);
746 gen_set_label(l2);
747 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
748 gen_set_label(l3);
749 }
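/* A CR field holds four bits: LT, GT, EQ and SO.  gen_op_cmp() first copies
 * XER[SO] into the target field, then sets exactly one of LT, GT or EQ
 * according to the signed or unsigned comparison, mirroring the architected
 * cmp/cmpl behaviour. */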
750
751 static always_inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
752 {
753 TCGv t0 = tcg_const_local_tl(arg1);
754 gen_op_cmp(arg0, t0, s, crf);
755 tcg_temp_free(t0);
756 }
757
758 #if defined(TARGET_PPC64)
759 static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
760 {
761 TCGv t0, t1;
762 t0 = tcg_temp_local_new();
763 t1 = tcg_temp_local_new();
764 if (s) {
765 tcg_gen_ext32s_tl(t0, arg0);
766 tcg_gen_ext32s_tl(t1, arg1);
767 } else {
768 tcg_gen_ext32u_tl(t0, arg0);
769 tcg_gen_ext32u_tl(t1, arg1);
770 }
771 gen_op_cmp(t0, t1, s, crf);
772 tcg_temp_free(t1);
773 tcg_temp_free(t0);
774 }
775
776 static always_inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
777 {
778 TCGv t0 = tcg_const_local_tl(arg1);
779 gen_op_cmp32(arg0, t0, s, crf);
780 tcg_temp_free(t0);
781 }
782 #endif
783
784 static always_inline void gen_set_Rc0 (DisasContext *ctx, TCGv reg)
785 {
786 #if defined(TARGET_PPC64)
787 if (!(ctx->sf_mode))
788 gen_op_cmpi32(reg, 0, 1, 0);
789 else
790 #endif
791 gen_op_cmpi(reg, 0, 1, 0);
792 }
793
794 /* cmp */
795 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER)
796 {
797 #if defined(TARGET_PPC64)
798 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
799 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
800 1, crfD(ctx->opcode));
801 else
802 #endif
803 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
804 1, crfD(ctx->opcode));
805 }
806
807 /* cmpi */
808 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
809 {
810 #if defined(TARGET_PPC64)
811 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
812 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
813 1, crfD(ctx->opcode));
814 else
815 #endif
816 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
817 1, crfD(ctx->opcode));
818 }
819
820 /* cmpl */
821 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER)
822 {
823 #if defined(TARGET_PPC64)
824 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
825 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
826 0, crfD(ctx->opcode));
827 else
828 #endif
829 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
830 0, crfD(ctx->opcode));
831 }
832
833 /* cmpli */
834 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
835 {
836 #if defined(TARGET_PPC64)
837 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
838 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
839 0, crfD(ctx->opcode));
840 else
841 #endif
842 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
843 0, crfD(ctx->opcode));
844 }
845
846 /* isel (PowerPC 2.03 specification) */
847 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
848 {
849 int l1, l2;
850 uint32_t bi = rC(ctx->opcode);
851 uint32_t mask;
852 TCGv_i32 t0;
853
854 l1 = gen_new_label();
855 l2 = gen_new_label();
856
857 mask = 1 << (3 - (bi & 0x03));
858 t0 = tcg_temp_new_i32();
859 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
860 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
861 if (rA(ctx->opcode) == 0)
862 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
863 else
864 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
865 tcg_gen_br(l2);
866 gen_set_label(l1);
867 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
868 gen_set_label(l2);
869 tcg_temp_free_i32(t0);
870 }
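/* isel: rD = rA (or 0 when rA is r0) if the CR bit selected by the BC field
 * (decoded with rC() here) is set, and rD = rB otherwise; the branches above
 * simply test the relevant bit of the corresponding 4-bit CR field. */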
871
872 /*** Integer arithmetic ***/
873
874 static always_inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, TCGv arg1, TCGv arg2, int sub)
875 {
876 int l1;
877 TCGv t0;
878
879 l1 = gen_new_label();
880 /* Start with XER OV disabled, the most likely case */
881 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
882 t0 = tcg_temp_local_new();
883 tcg_gen_xor_tl(t0, arg0, arg1);
884 #if defined(TARGET_PPC64)
885 if (!ctx->sf_mode)
886 tcg_gen_ext32s_tl(t0, t0);
887 #endif
888 if (sub)
889 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
890 else
891 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
892 tcg_gen_xor_tl(t0, arg1, arg2);
893 #if defined(TARGET_PPC64)
894 if (!ctx->sf_mode)
895 tcg_gen_ext32s_tl(t0, t0);
896 #endif
897 if (sub)
898 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
899 else
900 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
901 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
902 gen_set_label(l1);
903 tcg_temp_free(t0);
904 }
905
906 static always_inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1, TCGv arg2, int sub)
907 {
908 int l1 = gen_new_label();
909
910 #if defined(TARGET_PPC64)
911 if (!(ctx->sf_mode)) {
912 TCGv t0, t1;
913 t0 = tcg_temp_new();
914 t1 = tcg_temp_new();
915
916 tcg_gen_ext32u_tl(t0, arg1);
917 tcg_gen_ext32u_tl(t1, arg2);
918 if (sub) {
919 tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
920 } else {
921 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
922 }
923 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
924 gen_set_label(l1);
925 tcg_temp_free(t0);
926 tcg_temp_free(t1);
927 } else
928 #endif
929 {
930 if (sub) {
931 tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
932 } else {
933 tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
934 }
935 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
936 gen_set_label(l1);
937 }
938 }
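/* Carry detection: the caller passes the already-computed sum (or
 * difference) as arg1 and one of the original operands as arg2.  For an
 * addition a carry occurred exactly when the result is (unsigned) smaller
 * than that operand; for a subtraction CA is set when no borrow occurred,
 * i.e. when the result is not larger than the minuend, which is what the
 * GEU/GTU branches above test. */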
939
940 /* Common add function */
941 static always_inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
942 int add_ca, int compute_ca, int compute_ov)
943 {
944 TCGv t0, t1;
945
946 if ((!compute_ca && !compute_ov) ||
947 (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
948 t0 = ret;
949 } else {
950 t0 = tcg_temp_local_new();
951 }
952
953 if (add_ca) {
954 t1 = tcg_temp_local_new();
955 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
956 tcg_gen_shri_tl(t1, t1, XER_CA);
957 }
958
959 if (compute_ca && compute_ov) {
960 /* Start with XER CA and OV disabled, the most likely case */
961 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
962 } else if (compute_ca) {
963 /* Start with XER CA disabled, the most likely case */
964 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
965 } else if (compute_ov) {
966 /* Start with XER OV disabled, the most likely case */
967 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
968 }
969
970 tcg_gen_add_tl(t0, arg1, arg2);
971
972 if (compute_ca) {
973 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
974 }
975 if (add_ca) {
976 tcg_gen_add_tl(t0, t0, t1);
977 gen_op_arith_compute_ca(ctx, t0, t1, 0);
978 tcg_temp_free(t1);
979 }
980 if (compute_ov) {
981 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
982 }
983
984 if (unlikely(Rc(ctx->opcode) != 0))
985 gen_set_Rc0(ctx, t0);
986
987 if (!TCGV_EQUAL(t0, ret)) {
988 tcg_gen_mov_tl(ret, t0);
989 tcg_temp_free(t0);
990 }
991 }
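/* The t0 detour above exists because the CA/OV computations still need the
 * original arg1/arg2 values after the addition: when ret aliases one of the
 * inputs, the sum is built in a local temporary and only copied to ret at
 * the end. */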
992 /* Add functions with two operands */
993 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
994 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER) \
995 { \
996 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
997 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
998 add_ca, compute_ca, compute_ov); \
999 }
1000 /* Add functions with one operand and one immediate */
1001 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
1002 add_ca, compute_ca, compute_ov) \
1003 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER) \
1004 { \
1005 TCGv t0 = tcg_const_local_tl(const_val); \
1006 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1007 cpu_gpr[rA(ctx->opcode)], t0, \
1008 add_ca, compute_ca, compute_ov); \
1009 tcg_temp_free(t0); \
1010 }
1011
1012 /* add add. addo addo. */
1013 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
1014 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
1015 /* addc addc. addco addco. */
1016 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
1017 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
1018 /* adde adde. addeo addeo. */
1019 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
1020 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
1021 /* addme addme. addmeo addmeo. */
1022 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
1023 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
1024 /* addze addze. addzeo addzeo.*/
1025 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
1026 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
1027 /* addi */
1028 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1029 {
1030 target_long simm = SIMM(ctx->opcode);
1031
1032 if (rA(ctx->opcode) == 0) {
1033 /* li case */
1034 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1035 } else {
1036 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
1037 }
1038 }
1039 /* addic addic.*/
1040 static always_inline void gen_op_addic (DisasContext *ctx, TCGv ret, TCGv arg1,
1041 int compute_Rc0)
1042 {
1043 target_long simm = SIMM(ctx->opcode);
1044
1045 /* Start with XER CA disabled, the most likely case */
1046 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1047
1048 if (likely(simm != 0)) {
1049 TCGv t0 = tcg_temp_local_new();
1050 tcg_gen_addi_tl(t0, arg1, simm);
1051 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
1052 tcg_gen_mov_tl(ret, t0);
1053 tcg_temp_free(t0);
1054 } else {
1055 tcg_gen_mov_tl(ret, arg1);
1056 }
1057 if (compute_Rc0) {
1058 gen_set_Rc0(ctx, ret);
1059 }
1060 }
1061 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1062 {
1063 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1064 }
1065 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1066 {
1067 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1068 }
1069 /* addis */
1070 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1071 {
1072 target_long simm = SIMM(ctx->opcode);
1073
1074 if (rA(ctx->opcode) == 0) {
1075 /* lis case */
1076 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1077 } else {
1078 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
1079 }
1080 }
1081
1082 static always_inline void gen_op_arith_divw (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1083 int sign, int compute_ov)
1084 {
1085 int l1 = gen_new_label();
1086 int l2 = gen_new_label();
1087 TCGv_i32 t0 = tcg_temp_local_new_i32();
1088 TCGv_i32 t1 = tcg_temp_local_new_i32();
1089
1090 tcg_gen_trunc_tl_i32(t0, arg1);
1091 tcg_gen_trunc_tl_i32(t1, arg2);
1092 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
1093 if (sign) {
1094 int l3 = gen_new_label();
1095 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
1096 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
1097 gen_set_label(l3);
1098 tcg_gen_div_i32(t0, t0, t1);
1099 } else {
1100 tcg_gen_divu_i32(t0, t0, t1);
1101 }
1102 if (compute_ov) {
1103 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1104 }
1105 tcg_gen_br(l2);
1106 gen_set_label(l1);
1107 if (sign) {
1108 tcg_gen_sari_i32(t0, t0, 31);
1109 } else {
1110 tcg_gen_movi_i32(t0, 0);
1111 }
1112 if (compute_ov) {
1113 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1114 }
1115 gen_set_label(l2);
1116 tcg_gen_extu_i32_tl(ret, t0);
1117 tcg_temp_free_i32(t0);
1118 tcg_temp_free_i32(t1);
1119 if (unlikely(Rc(ctx->opcode) != 0))
1120 gen_set_Rc0(ctx, ret);
1121 }
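/* The l1 path handles the two cases the architecture leaves undefined,
 * division by zero and INT32_MIN / -1: the quotient is forced to 0 for the
 * unsigned forms, or to the dividend's sign bit replicated across the word
 * for the signed forms, and XER[OV]/XER[SO] are set when the OE form is
 * used. */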
1122 /* Div functions */
1123 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1124 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) \
1125 { \
1126 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1127 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1128 sign, compute_ov); \
1129 }
1130 /* divwu divwu. divwuo divwuo. */
1131 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1132 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1133 /* divw divw. divwo divwo. */
1134 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1135 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1136 #if defined(TARGET_PPC64)
1137 static always_inline void gen_op_arith_divd (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1138 int sign, int compute_ov)
1139 {
1140 int l1 = gen_new_label();
1141 int l2 = gen_new_label();
1142
1143 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
1144 if (sign) {
1145 int l3 = gen_new_label();
1146 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
1147 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
1148 gen_set_label(l3);
1149 tcg_gen_div_i64(ret, arg1, arg2);
1150 } else {
1151 tcg_gen_divu_i64(ret, arg1, arg2);
1152 }
1153 if (compute_ov) {
1154 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1155 }
1156 tcg_gen_br(l2);
1157 gen_set_label(l1);
1158 if (sign) {
1159 tcg_gen_sari_i64(ret, arg1, 63);
1160 } else {
1161 tcg_gen_movi_i64(ret, 0);
1162 }
1163 if (compute_ov) {
1164 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1165 }
1166 gen_set_label(l2);
1167 if (unlikely(Rc(ctx->opcode) != 0))
1168 gen_set_Rc0(ctx, ret);
1169 }
1170 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1171 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \
1172 { \
1173 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1174 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1175 sign, compute_ov); \
1176 }
1177 /* divdu divdu. divduo divduo. */
1178 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1179 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1180 /* divd divd. divdo divdo. */
1181 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1182 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1183 #endif
1184
1185 /* mulhw mulhw. */
1186 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
1187 {
1188 TCGv_i64 t0, t1;
1189
1190 t0 = tcg_temp_new_i64();
1191 t1 = tcg_temp_new_i64();
1192 #if defined(TARGET_PPC64)
1193 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1194 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1195 tcg_gen_mul_i64(t0, t0, t1);
1196 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1197 #else
1198 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1199 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1200 tcg_gen_mul_i64(t0, t0, t1);
1201 tcg_gen_shri_i64(t0, t0, 32);
1202 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1203 #endif
1204 tcg_temp_free_i64(t0);
1205 tcg_temp_free_i64(t1);
1206 if (unlikely(Rc(ctx->opcode) != 0))
1207 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1208 }
1209 /* mulhwu mulhwu. */
1210 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
1211 {
1212 TCGv_i64 t0, t1;
1213
1214 t0 = tcg_temp_new_i64();
1215 t1 = tcg_temp_new_i64();
1216 #if defined(TARGET_PPC64)
1217 tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1218 tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1219 tcg_gen_mul_i64(t0, t0, t1);
1220 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1221 #else
1222 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1223 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1224 tcg_gen_mul_i64(t0, t0, t1);
1225 tcg_gen_shri_i64(t0, t0, 32);
1226 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1227 #endif
1228 tcg_temp_free_i64(t0);
1229 tcg_temp_free_i64(t1);
1230 if (unlikely(Rc(ctx->opcode) != 0))
1231 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1232 }
1233 /* mullw mullw. */
1234 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER)
1235 {
1236 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1237 cpu_gpr[rB(ctx->opcode)]);
1238 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1239 if (unlikely(Rc(ctx->opcode) != 0))
1240 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1241 }
1242 /* mullwo mullwo. */
1243 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
1244 {
1245 int l1;
1246 TCGv_i64 t0, t1;
1247
1248 t0 = tcg_temp_new_i64();
1249 t1 = tcg_temp_new_i64();
1250 l1 = gen_new_label();
1251 /* Start with XER OV disabled, the most likely case */
1252 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1253 #if defined(TARGET_PPC64)
1254 tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1255 tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1256 #else
1257 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1258 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1259 #endif
1260 tcg_gen_mul_i64(t0, t0, t1);
1261 #if defined(TARGET_PPC64)
1262 tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1263 tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1264 #else
1265 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1266 tcg_gen_ext32s_i64(t1, t0);
1267 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1268 #endif
1269 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1270 gen_set_label(l1);
1271 tcg_temp_free_i64(t0);
1272 tcg_temp_free_i64(t1);
1273 if (unlikely(Rc(ctx->opcode) != 0))
1274 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1275 }
1276 /* mulli */
1277 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1278 {
1279 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1280 SIMM(ctx->opcode));
1281 }
1282 #if defined(TARGET_PPC64)
1283 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1284 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \
1285 { \
1286 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1287 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1288 if (unlikely(Rc(ctx->opcode) != 0)) \
1289 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1290 }
1291 /* mulhdu mulhdu. */
1292 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1293 /* mulhd mulhd. */
1294 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1295 /* mulld mulld. */
1296 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B)
1297 {
1298 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1299 cpu_gpr[rB(ctx->opcode)]);
1300 if (unlikely(Rc(ctx->opcode) != 0))
1301 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1302 }
1303 /* mulldo mulldo. */
1304 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1305 #endif
1306
1307 /* neg neg. nego nego. */
1308 static always_inline void gen_op_arith_neg (DisasContext *ctx, TCGv ret, TCGv arg1, int ov_check)
1309 {
1310 int l1 = gen_new_label();
1311 int l2 = gen_new_label();
1312 TCGv t0 = tcg_temp_local_new();
1313 #if defined(TARGET_PPC64)
1314 if (ctx->sf_mode) {
1315 tcg_gen_mov_tl(t0, arg1);
1316 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1317 } else
1318 #endif
1319 {
1320 tcg_gen_ext32s_tl(t0, arg1);
1321 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1322 }
1323 tcg_gen_neg_tl(ret, arg1);
1324 if (ov_check) {
1325 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1326 }
1327 tcg_gen_br(l2);
1328 gen_set_label(l1);
1329 tcg_gen_mov_tl(ret, t0);
1330 if (ov_check) {
1331 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1332 }
1333 gen_set_label(l2);
1334 tcg_temp_free(t0);
1335 if (unlikely(Rc(ctx->opcode) != 0))
1336 gen_set_Rc0(ctx, ret);
1337 }
1338 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER)
1339 {
1340 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1341 }
1342 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER)
1343 {
1344 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1345 }
1346
1347 /* Common subf function */
1348 static always_inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1349 int add_ca, int compute_ca, int compute_ov)
1350 {
1351 TCGv t0, t1;
1352
1353 if ((!compute_ca && !compute_ov) ||
1354 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1355 t0 = ret;
1356 } else {
1357 t0 = tcg_temp_local_new();
1358 }
1359
1360 if (add_ca) {
1361 t1 = tcg_temp_local_new();
1362 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1363 tcg_gen_shri_tl(t1, t1, XER_CA);
1364 }
1365
1366 if (compute_ca && compute_ov) {
1367 /* Start with XER CA and OV disabled, the most likely case */
1368 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1369 } else if (compute_ca) {
1370 /* Start with XER CA disabled, the most likely case */
1371 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1372 } else if (compute_ov) {
1373 /* Start with XER OV disabled, the most likely case */
1374 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1375 }
1376
1377 if (add_ca) {
1378 tcg_gen_not_tl(t0, arg1);
1379 tcg_gen_add_tl(t0, t0, arg2);
1380 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1381 tcg_gen_add_tl(t0, t0, t1);
1382 gen_op_arith_compute_ca(ctx, t0, t1, 0);
1383 tcg_temp_free(t1);
1384 } else {
1385 tcg_gen_sub_tl(t0, arg2, arg1);
1386 if (compute_ca) {
1387 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1388 }
1389 }
1390 if (compute_ov) {
1391 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1392 }
1393
1394 if (unlikely(Rc(ctx->opcode) != 0))
1395 gen_set_Rc0(ctx, t0);
1396
1397 if (!TCGV_EQUAL(t0, ret)) {
1398 tcg_gen_mov_tl(ret, t0);
1399 tcg_temp_free(t0);
1400 }
1401 }
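/* subf computes arg2 - arg1 (note the operand order).  The carrying forms
 * use the identity ~arg1 + arg2 + CA, which equals arg2 - arg1 when CA is 1
 * and arg2 - arg1 - 1 when CA is 0, so the addition-style CA computation can
 * be reused on the intermediate sums. */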
1402 /* Sub functions with two operands */
1403 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1404 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER) \
1405 { \
1406 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1407 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1408 add_ca, compute_ca, compute_ov); \
1409 }
1410 /* Sub functions with one operand and one immediate */
1411 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1412 add_ca, compute_ca, compute_ov) \
1413 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER) \
1414 { \
1415 TCGv t0 = tcg_const_local_tl(const_val); \
1416 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1417 cpu_gpr[rA(ctx->opcode)], t0, \
1418 add_ca, compute_ca, compute_ov); \
1419 tcg_temp_free(t0); \
1420 }
1421 /* subf subf. subfo subfo. */
1422 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1423 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1424 /* subfc subfc. subfco subfco. */
1425 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1426 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1427 /* subfe subfe. subfeo subfeo. */
1428 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1429 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1430 /* subfme subfme. subfmeo subfmeo. */
1431 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1432 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1433 /* subfze subfze. subfzeo subfzeo.*/
1434 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1435 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1436 /* subfic */
1437 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1438 {
1439 /* Start with XER CA disabled, the most likely case */
1440 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1441 TCGv t0 = tcg_temp_local_new();
1442 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1443 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1444 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1445 tcg_temp_free(t1);
1446 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1447 tcg_temp_free(t0);
1448 }
1449
1450 /*** Integer logical ***/
1451 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1452 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) \
1453 { \
1454 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1455 cpu_gpr[rB(ctx->opcode)]); \
1456 if (unlikely(Rc(ctx->opcode) != 0)) \
1457 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1458 }
1459
1460 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1461 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1462 { \
1463 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1464 if (unlikely(Rc(ctx->opcode) != 0)) \
1465 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1466 }
1467
1468 /* and & and. */
1469 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1470 /* andc & andc. */
1471 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1472 /* andi. */
1473 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1474 {
1475 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1476 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1477 }
1478 /* andis. */
1479 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1480 {
1481 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1482 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1483 }
1484 /* cntlzw */
1485 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
1486 {
1487 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1488 if (unlikely(Rc(ctx->opcode) != 0))
1489 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1490 }
1491 /* eqv & eqv. */
1492 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1493 /* extsb & extsb. */
1494 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1495 /* extsh & extsh. */
1496 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1497 /* nand & nand. */
1498 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1499 /* nor & nor. */
1500 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1501 /* or & or. */
1502 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1503 {
1504 int rs, ra, rb;
1505
1506 rs = rS(ctx->opcode);
1507 ra = rA(ctx->opcode);
1508 rb = rB(ctx->opcode);
1509 /* Optimisation for the mr / mr. case */
1510 if (rs != ra || rs != rb) {
1511 if (rs != rb)
1512 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1513 else
1514 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1515 if (unlikely(Rc(ctx->opcode) != 0))
1516 gen_set_Rc0(ctx, cpu_gpr[ra]);
1517 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1518 gen_set_Rc0(ctx, cpu_gpr[rs]);
1519 #if defined(TARGET_PPC64)
1520 } else {
1521 int prio = 0;
1522
1523 switch (rs) {
1524 case 1:
1525 /* Set process priority to low */
1526 prio = 2;
1527 break;
1528 case 6:
1529 /* Set process priority to medium-low */
1530 prio = 3;
1531 break;
1532 case 2:
1533 /* Set process priority to normal */
1534 prio = 4;
1535 break;
1536 #if !defined(CONFIG_USER_ONLY)
1537 case 31:
1538 if (ctx->supervisor > 0) {
1539 /* Set process priority to very low */
1540 prio = 1;
1541 }
1542 break;
1543 case 5:
1544 if (ctx->supervisor > 0) {
1545 /* Set process priority to medium-high */
1546 prio = 5;
1547 }
1548 break;
1549 case 3:
1550 if (ctx->supervisor > 0) {
1551 /* Set process priority to high */
1552 prio = 6;
1553 }
1554 break;
1555 case 7:
1556 if (ctx->supervisor > 1) {
1557 /* Set process priority to very high */
1558 prio = 7;
1559 }
1560 break;
1561 #endif
1562 default:
1563 /* nop */
1564 break;
1565 }
1566 if (prio) {
1567 TCGv t0 = tcg_temp_new();
1568 gen_load_spr(t0, SPR_PPR);
1569 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1570 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1571 gen_store_spr(SPR_PPR, t0);
1572 tcg_temp_free(t0);
1573 }
1574 #endif
1575 }
1576 }
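/* "or rx,rx,rx" encodings move no data and are architected as
 * thread-priority hints on 64-bit implementations; the block above maps
 * them onto the PPR special register, with some levels restricted to
 * supervisor or hypervisor state. */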
1577 /* orc & orc. */
1578 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1579 /* xor & xor. */
1580 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1581 {
1582 /* Optimisation for "set to zero" case */
1583 if (rS(ctx->opcode) != rB(ctx->opcode))
1584 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1585 else
1586 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1587 if (unlikely(Rc(ctx->opcode) != 0))
1588 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1589 }
1590 /* ori */
1591 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1592 {
1593 target_ulong uimm = UIMM(ctx->opcode);
1594
1595 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1596 /* NOP */
1597 /* XXX: should handle special NOPs for POWER series */
1598 return;
1599 }
1600 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1601 }
1602 /* oris */
1603 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1604 {
1605 target_ulong uimm = UIMM(ctx->opcode);
1606
1607 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1608 /* NOP */
1609 return;
1610 }
1611 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1612 }
1613 /* xori */
1614 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1615 {
1616 target_ulong uimm = UIMM(ctx->opcode);
1617
1618 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1619 /* NOP */
1620 return;
1621 }
1622 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1623 }
1624 /* xoris */
1625 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1626 {
1627 target_ulong uimm = UIMM(ctx->opcode);
1628
1629 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1630 /* NOP */
1631 return;
1632 }
1633 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1634 }
1635 /* popcntb : PowerPC 2.03 specification */
1636 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1637 {
1638 #if defined(TARGET_PPC64)
1639 if (ctx->sf_mode)
1640 gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1641 else
1642 #endif
1643 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1644 }
1645
1646 #if defined(TARGET_PPC64)
1647 /* extsw & extsw. */
1648 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1649 /* cntlzd */
1650 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
1651 {
1652 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1653 if (unlikely(Rc(ctx->opcode) != 0))
1654 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1655 }
1656 #endif
1657
1658 /*** Integer rotate ***/
1659 /* rlwimi & rlwimi. */
1660 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1661 {
1662 uint32_t mb, me, sh;
1663
1664 mb = MB(ctx->opcode);
1665 me = ME(ctx->opcode);
1666 sh = SH(ctx->opcode);
1667 if (likely(sh == 0 && mb == 0 && me == 31)) {
1668 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1669 } else {
1670 target_ulong mask;
1671 TCGv t1;
1672 TCGv t0 = tcg_temp_new();
1673 #if defined(TARGET_PPC64)
1674 TCGv_i32 t2 = tcg_temp_new_i32();
1675 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1676 tcg_gen_rotli_i32(t2, t2, sh);
1677 tcg_gen_extu_i32_i64(t0, t2);
1678 tcg_temp_free_i32(t2);
1679 #else
1680 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1681 #endif
1682 #if defined(TARGET_PPC64)
1683 mb += 32;
1684 me += 32;
1685 #endif
1686 mask = MASK(mb, me);
1687 t1 = tcg_temp_new();
1688 tcg_gen_andi_tl(t0, t0, mask);
1689 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1690 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1691 tcg_temp_free(t0);
1692 tcg_temp_free(t1);
1693 }
1694 if (unlikely(Rc(ctx->opcode) != 0))
1695 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1696 }
1697 /* rlwinm & rlwinm. */
1698 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1699 {
1700 uint32_t mb, me, sh;
1701
1702 sh = SH(ctx->opcode);
1703 mb = MB(ctx->opcode);
1704 me = ME(ctx->opcode);
1705
1706 if (likely(mb == 0 && me == (31 - sh))) {
1707 if (likely(sh == 0)) {
1708 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1709 } else {
1710 TCGv t0 = tcg_temp_new();
1711 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1712 tcg_gen_shli_tl(t0, t0, sh);
1713 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1714 tcg_temp_free(t0);
1715 }
1716 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1717 TCGv t0 = tcg_temp_new();
1718 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1719 tcg_gen_shri_tl(t0, t0, mb);
1720 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1721 tcg_temp_free(t0);
1722 } else {
1723 TCGv t0 = tcg_temp_new();
1724 #if defined(TARGET_PPC64)
1725 TCGv_i32 t1 = tcg_temp_new_i32();
1726 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1727 tcg_gen_rotli_i32(t1, t1, sh);
1728 tcg_gen_extu_i32_i64(t0, t1);
1729 tcg_temp_free_i32(t1);
1730 #else
1731 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1732 #endif
1733 #if defined(TARGET_PPC64)
1734 mb += 32;
1735 me += 32;
1736 #endif
1737 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1738 tcg_temp_free(t0);
1739 }
1740 if (unlikely(Rc(ctx->opcode) != 0))
1741 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1742 }
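/* The first two special cases above correspond to the common rlwinm-encoded
 * idioms: mb == 0 && me == 31 - sh is slwi (shift left and clear the shifted
 * bits) and sh == 32 - mb && me == 31 is srwi (shift right), so they avoid
 * the generic rotate-and-mask sequence. */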
1743 /* rlwnm & rlwnm. */
1744 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1745 {
1746 uint32_t mb, me;
1747 TCGv t0;
1748 #if defined(TARGET_PPC64)
1749 TCGv_i32 t1, t2;
1750 #endif
1751
1752 mb = MB(ctx->opcode);
1753 me = ME(ctx->opcode);
1754 t0 = tcg_temp_new();
1755 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1756 #if defined(TARGET_PPC64)
1757 t1 = tcg_temp_new_i32();
1758 t2 = tcg_temp_new_i32();
1759 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1760 tcg_gen_trunc_i64_i32(t2, t0);
1761 tcg_gen_rotl_i32(t1, t1, t2);
1762 tcg_gen_extu_i32_i64(t0, t1);
1763 tcg_temp_free_i32(t1);
1764 tcg_temp_free_i32(t2);
1765 #else
1766 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1767 #endif
1768 if (unlikely(mb != 0 || me != 31)) {
1769 #if defined(TARGET_PPC64)
1770 mb += 32;
1771 me += 32;
1772 #endif
1773 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1774 } else {
1775 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1776 }
1777 tcg_temp_free(t0);
1778 if (unlikely(Rc(ctx->opcode) != 0))
1779 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1780 }
1781
1782 #if defined(TARGET_PPC64)
1783 #define GEN_PPC64_R2(name, opc1, opc2) \
1784 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1785 { \
1786 gen_##name(ctx, 0); \
1787 } \
1788 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1789 PPC_64B) \
1790 { \
1791 gen_##name(ctx, 1); \
1792 }
1793 #define GEN_PPC64_R4(name, opc1, opc2) \
1794 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1795 { \
1796 gen_##name(ctx, 0, 0); \
1797 } \
1798 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1799 PPC_64B) \
1800 { \
1801 gen_##name(ctx, 0, 1); \
1802 } \
1803 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1804 PPC_64B) \
1805 { \
1806 gen_##name(ctx, 1, 0); \
1807 } \
1808 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1809 PPC_64B) \
1810 { \
1811 gen_##name(ctx, 1, 1); \
1812 }
1813
1814 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1815 uint32_t me, uint32_t sh)
1816 {
1817 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1818 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1819 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1820 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1821 } else {
1822 TCGv t0 = tcg_temp_new();
1823 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1824 if (likely(mb == 0 && me == 63)) {
1825 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1826 } else {
1827 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1828 }
1829 tcg_temp_free(t0);
1830 }
1831 if (unlikely(Rc(ctx->opcode) != 0))
1832 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1833 }
1834 /* rldicl - rldicl. */
1835 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1836 {
1837 uint32_t sh, mb;
1838
1839 sh = SH(ctx->opcode) | (shn << 5);
1840 mb = MB(ctx->opcode) | (mbn << 5);
1841 gen_rldinm(ctx, mb, 63, sh);
1842 }
1843 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
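/* Worked example (assuming the IBM bit numbering used by MASK() earlier in
 * this file, i.e. bit 0 is the MSB): rldicl rA,rS,0,48 rotates by 0 and ANDs
 * with MASK(48, 63) = 0x000000000000FFFF, so it clears the upper 48 bits;
 * this is the extended mnemonic clrldi rA,rS,48.
 */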
1844 /* rldicr - rldicr. */
1845 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1846 {
1847 uint32_t sh, me;
1848
1849 sh = SH(ctx->opcode) | (shn << 5);
1850 me = MB(ctx->opcode) | (men << 5);
1851 gen_rldinm(ctx, 0, me, sh);
1852 }
1853 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1854 /* rldic - rldic. */
1855 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1856 {
1857 uint32_t sh, mb;
1858
1859 sh = SH(ctx->opcode) | (shn << 5);
1860 mb = MB(ctx->opcode) | (mbn << 5);
1861 gen_rldinm(ctx, mb, 63 - sh, sh);
1862 }
1863 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1864
1865 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1866 uint32_t me)
1867 {
1868 TCGv t0;
1869
1870 mb = MB(ctx->opcode);
1871 me = ME(ctx->opcode);
1872 t0 = tcg_temp_new();
1873 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1874 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1875 if (unlikely(mb != 0 || me != 63)) {
1876 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1877 } else {
1878 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1879 }
1880 tcg_temp_free(t0);
1881 if (unlikely(Rc(ctx->opcode) != 0))
1882 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1883 }
1884
1885 /* rldcl - rldcl. */
1886 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1887 {
1888 uint32_t mb;
1889
1890 mb = MB(ctx->opcode) | (mbn << 5);
1891 gen_rldnm(ctx, mb, 63);
1892 }
1893 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1894 /* rldcr - rldcr. */
1895 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1896 {
1897 uint32_t me;
1898
1899 me = MB(ctx->opcode) | (men << 5);
1900 gen_rldnm(ctx, 0, me);
1901 }
1902 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1903 /* rldimi - rldimi. */
1904 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1905 {
1906 uint32_t sh, mb, me;
1907
1908 sh = SH(ctx->opcode) | (shn << 5);
1909 mb = MB(ctx->opcode) | (mbn << 5);
1910 me = 63 - sh;
1911 if (unlikely(sh == 0 && mb == 0)) {
1912 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1913 } else {
1914 TCGv t0, t1;
1915 target_ulong mask;
1916
1917 t0 = tcg_temp_new();
1918 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1919 t1 = tcg_temp_new();
1920 mask = MASK(mb, me);
1921 tcg_gen_andi_tl(t0, t0, mask);
1922 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1923 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1924 tcg_temp_free(t0);
1925 tcg_temp_free(t1);
1926 }
1927 if (unlikely(Rc(ctx->opcode) != 0))
1928 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1929 }
1930 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1931 #endif
1932
1933 /*** Integer shift ***/
1934 /* slw & slw. */
1935 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER)
1936 {
1937 TCGv t0;
1938 int l1, l2;
1939 l1 = gen_new_label();
1940 l2 = gen_new_label();
1941
1942 t0 = tcg_temp_local_new();
1943 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1944 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
1945 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1946 tcg_gen_br(l2);
1947 gen_set_label(l1);
1948 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
1949 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1950 gen_set_label(l2);
1951 tcg_temp_free(t0);
1952 if (unlikely(Rc(ctx->opcode) != 0))
1953 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1954 }
1955 /* sraw & sraw. */
1956 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
1957 {
1958 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1959 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1960 if (unlikely(Rc(ctx->opcode) != 0))
1961 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1962 }
1963 /* srawi & srawi. */
1964 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1965 {
1966 int sh = SH(ctx->opcode);
1967 if (sh != 0) {
1968 int l1, l2;
1969 TCGv t0;
1970 l1 = gen_new_label();
1971 l2 = gen_new_label();
1972 t0 = tcg_temp_local_new();
1973 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1974 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1975 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1976 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1977 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1978 tcg_gen_br(l2);
1979 gen_set_label(l1);
1980 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1981 gen_set_label(l2);
1982 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1983 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
1984 tcg_temp_free(t0);
1985 } else {
1986 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1987 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1988 }
1989 if (unlikely(Rc(ctx->opcode) != 0))
1990 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1991 }
1992 /* srw & srw. */
1993 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER)
1994 {
1995 TCGv t0, t1;
1996 int l1, l2;
1997 l1 = gen_new_label();
1998 l2 = gen_new_label();
1999
2000 t0 = tcg_temp_local_new();
2001 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
2002 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
2003 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2004 tcg_gen_br(l2);
2005 gen_set_label(l1);
2006 t1 = tcg_temp_new();
2007 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
2008 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
2009 tcg_temp_free(t1);
2010 gen_set_label(l2);
2011 tcg_temp_free(t0);
2012 if (unlikely(Rc(ctx->opcode) != 0))
2013 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2014 }
2015 #if defined(TARGET_PPC64)
2016 /* sld & sld. */
2017 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B)
2018 {
2019 TCGv t0;
2020 int l1, l2;
2021 l1 = gen_new_label();
2022 l2 = gen_new_label();
2023
2024 t0 = tcg_temp_local_new();
2025 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2026 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2027 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2028 tcg_gen_br(l2);
2029 gen_set_label(l1);
2030 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2031 gen_set_label(l2);
2032 tcg_temp_free(t0);
2033 if (unlikely(Rc(ctx->opcode) != 0))
2034 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2035 }
2036 /* srad & srad. */
2037 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
2038 {
2039 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
2040 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2041 if (unlikely(Rc(ctx->opcode) != 0))
2042 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2043 }
2044 /* sradi & sradi. */
2045 static always_inline void gen_sradi (DisasContext *ctx, int n)
2046 {
2047 int sh = SH(ctx->opcode) + (n << 5);
2048 if (sh != 0) {
2049 int l1, l2;
2050 TCGv t0;
2051 l1 = gen_new_label();
2052 l2 = gen_new_label();
2053 t0 = tcg_temp_local_new();
2054 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
2055 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
2056 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2057 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
2058 tcg_gen_br(l2);
2059 gen_set_label(l1);
2060 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
2061 gen_set_label(l2);
2062 tcg_temp_free(t0);
2063 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
2064 } else {
2065 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2066 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
2067 }
2068 if (unlikely(Rc(ctx->opcode) != 0))
2069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2070 }
2071 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
2072 {
2073 gen_sradi(ctx, 0);
2074 }
2075 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
2076 {
2077 gen_sradi(ctx, 1);
2078 }
2079 /* srd & srd. */
2080 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B)
2081 {
2082 TCGv t0;
2083 int l1, l2;
2084 l1 = gen_new_label();
2085 l2 = gen_new_label();
2086
2087 t0 = tcg_temp_local_new();
2088 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2089 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2090 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2091 tcg_gen_br(l2);
2092 gen_set_label(l1);
2093 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2094 gen_set_label(l2);
2095 tcg_temp_free(t0);
2096 if (unlikely(Rc(ctx->opcode) != 0))
2097 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2098 }
2099 #endif
2100
2101 /*** Floating-Point arithmetic ***/
2102 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
2103 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
2104 { \
2105 if (unlikely(!ctx->fpu_enabled)) { \
2106 GEN_EXCP_NO_FP(ctx); \
2107 return; \
2108 } \
2109 gen_reset_fpstatus(); \
2110 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2111 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2112 if (isfloat) { \
2113 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2114 } \
2115 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
2116 Rc(ctx->opcode) != 0); \
2117 }
2118
2119 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2120 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2121 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
2122
2123 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2124 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
2125 { \
2126 if (unlikely(!ctx->fpu_enabled)) { \
2127 GEN_EXCP_NO_FP(ctx); \
2128 return; \
2129 } \
2130 gen_reset_fpstatus(); \
2131 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2132 cpu_fpr[rB(ctx->opcode)]); \
2133 if (isfloat) { \
2134 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2135 } \
2136 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2137 set_fprf, Rc(ctx->opcode) != 0); \
2138 }
2139 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2140 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2141 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2142
2143 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2144 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
2145 { \
2146 if (unlikely(!ctx->fpu_enabled)) { \
2147 GEN_EXCP_NO_FP(ctx); \
2148 return; \
2149 } \
2150 gen_reset_fpstatus(); \
2151 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2152 cpu_fpr[rC(ctx->opcode)]); \
2153 if (isfloat) { \
2154 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2155 } \
2156 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2157 set_fprf, Rc(ctx->opcode) != 0); \
2158 }
2159 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2160 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2161 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2162
2163 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2164 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
2165 { \
2166 if (unlikely(!ctx->fpu_enabled)) { \
2167 GEN_EXCP_NO_FP(ctx); \
2168 return; \
2169 } \
2170 gen_reset_fpstatus(); \
2171 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2172 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2173 set_fprf, Rc(ctx->opcode) != 0); \
2174 }
2175
2176 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2177 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
2178 { \
2179 if (unlikely(!ctx->fpu_enabled)) { \
2180 GEN_EXCP_NO_FP(ctx); \
2181 return; \
2182 } \
2183 gen_reset_fpstatus(); \
2184 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2185 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2186 set_fprf, Rc(ctx->opcode) != 0); \
2187 }
2188
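/* The macros above generate both precisions of each arithmetic instruction:
 * major opcode 0x3F is the double-precision form and 0x3B the
 * single-precision one (isfloat != 0), which simply rounds the double result
 * with gen_helper_frsp().  set_fprf selects whether FPSCR[FPRF] is updated
 * and Rc(opcode) selects the record (CR1) form, both via gen_compute_fprf().
 */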
2189 /* fadd - fadds */
2190 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2191 /* fdiv - fdivs */
2192 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2193 /* fmul - fmuls */
2194 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2195
2196 /* fre */
2197 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2198
2199 /* fres */
2200 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2201
2202 /* frsqrte */
2203 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2204
2205 /* frsqrtes */
2206 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES)
2207 {
2208 if (unlikely(!ctx->fpu_enabled)) {
2209 GEN_EXCP_NO_FP(ctx);
2210 return;
2211 }
2212 gen_reset_fpstatus();
2213 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2214 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2215 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2216 }
2217
2218 /* fsel */
2219 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2220 /* fsub - fsubs */
2221 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2222 /* Optional: */
2223 /* fsqrt */
2224 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
2225 {
2226 if (unlikely(!ctx->fpu_enabled)) {
2227 GEN_EXCP_NO_FP(ctx);
2228 return;
2229 }
2230 gen_reset_fpstatus();
2231 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2232 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2233 }
2234
2235 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
2236 {
2237 if (unlikely(!ctx->fpu_enabled)) {
2238 GEN_EXCP_NO_FP(ctx);
2239 return;
2240 }
2241 gen_reset_fpstatus();
2242 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2243 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2244 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2245 }
2246
2247 /*** Floating-Point multiply-and-add ***/
2248 /* fmadd - fmadds */
2249 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2250 /* fmsub - fmsubs */
2251 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2252 /* fnmadd - fnmadds */
2253 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2254 /* fnmsub - fnmsubs */
2255 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2256
2257 /*** Floating-Point round & convert ***/
2258 /* fctiw */
2259 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2260 /* fctiwz */
2261 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2262 /* frsp */
2263 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2264 #if defined(TARGET_PPC64)
2265 /* fcfid */
2266 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2267 /* fctid */
2268 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2269 /* fctidz */
2270 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2271 #endif
2272
2273 /* frin */
2274 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2275 /* friz */
2276 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2277 /* frip */
2278 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2279 /* frim */
2280 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2281
2282 /*** Floating-Point compare ***/
2283 /* fcmpo */
2284 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
2285 {
2286 if (unlikely(!ctx->fpu_enabled)) {
2287 GEN_EXCP_NO_FP(ctx);
2288 return;
2289 }
2290 gen_reset_fpstatus();
2291 gen_helper_fcmpo(cpu_crf[crfD(ctx->opcode)],
2292 cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2293 gen_helper_float_check_status();
2294 }
2295
2296 /* fcmpu */
2297 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
2298 {
2299 if (unlikely(!ctx->fpu_enabled)) {
2300 GEN_EXCP_NO_FP(ctx);
2301 return;
2302 }
2303 gen_reset_fpstatus();
2304 gen_helper_fcmpu(cpu_crf[crfD(ctx->opcode)],
2305 cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2306 gen_helper_float_check_status();
2307 }
2308
2309 /*** Floating-point move ***/
2310 /* fabs */
2311 /* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
2312 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2313
2314 /* fmr - fmr. */
2315 /* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
2316 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
2317 {
2318 if (unlikely(!ctx->fpu_enabled)) {
2319 GEN_EXCP_NO_FP(ctx);
2320 return;
2321 }
2322 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2323 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2324 }
2325
2326 /* fnabs */
2327 /* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
2328 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2329 /* fneg */
2330 /* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
2331 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2332
2333 /*** Floating-Point status & ctrl register ***/
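/* FPSCR is held in the 32-bit global cpu_fpscr.  Its eight 4-bit fields are
 * numbered 0 (most significant nibble) to 7, which is why the handlers below
 * convert a field number n into a shift of 4 * (7 - n) bits.
 */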
2334 /* mcrfs */
2335 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
2336 {
2337 int bfa;
2338
2339 if (unlikely(!ctx->fpu_enabled)) {
2340 GEN_EXCP_NO_FP(ctx);
2341 return;
2342 }
2343 gen_optimize_fprf();
2344 bfa = 4 * (7 - crfS(ctx->opcode));
2345 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2346 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2347 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2348 }
2349
2350 /* mffs */
2351 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2352 {
2353 if (unlikely(!ctx->fpu_enabled)) {
2354 GEN_EXCP_NO_FP(ctx);
2355 return;
2356 }
2357 gen_optimize_fprf();
2358 gen_reset_fpstatus();
2359 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2360 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2361 }
2362
2363 /* mtfsb0 */
2364 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2365 {
2366 uint8_t crb;
2367
2368 if (unlikely(!ctx->fpu_enabled)) {
2369 GEN_EXCP_NO_FP(ctx);
2370 return;
2371 }
2372 crb = 31 - crbD(ctx->opcode);
2373 gen_optimize_fprf();
2374 gen_reset_fpstatus();
2375 if (likely(crb != 30 && crb != 29))
2376 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(1 << crb));
2377 if (unlikely(Rc(ctx->opcode) != 0)) {
2378 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2379 }
2380 }
2381
2382 /* mtfsb1 */
2383 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2384 {
2385 uint8_t crb;
2386
2387 if (unlikely(!ctx->fpu_enabled)) {
2388 GEN_EXCP_NO_FP(ctx);
2389 return;
2390 }
2391 crb = 31 - crbD(ctx->opcode);
2392 gen_optimize_fprf();
2393 gen_reset_fpstatus();
2394 /* XXX: we pretend we can only do IEEE floating-point computations */
2395 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2396 TCGv_i32 t0 = tcg_const_i32(crb);
2397 gen_helper_fpscr_setbit(t0);
2398 tcg_temp_free_i32(t0);
2399 }
2400 if (unlikely(Rc(ctx->opcode) != 0)) {
2401 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2402 }
2403 /* We can raise a deferred exception */
2404 gen_helper_float_check_status();
2405 }
2406
2407 /* mtfsf */
2408 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2409 {
2410 TCGv_i32 t0;
2411
2412 if (unlikely(!ctx->fpu_enabled)) {
2413 GEN_EXCP_NO_FP(ctx);
2414 return;
2415 }
2416 gen_optimize_fprf();
2417 gen_reset_fpstatus();
2418 t0 = tcg_const_i32(FM(ctx->opcode));
2419 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2420 tcg_temp_free_i32(t0);
2421 if (unlikely(Rc(ctx->opcode) != 0)) {
2422 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2423 }
2424 /* We can raise a deferred exception */
2425 gen_helper_float_check_status();
2426 }
2427
2428 /* mtfsfi */
2429 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2430 {
2431 int bf, sh;
2432 TCGv_i64 t0;
2433 TCGv_i32 t1;
2434
2435 if (unlikely(!ctx->fpu_enabled)) {
2436 GEN_EXCP_NO_FP(ctx);
2437 return;
2438 }
2439 bf = crbD(ctx->opcode) >> 2;
2440 sh = 7 - bf;
2441 gen_optimize_fprf();
2442 gen_reset_fpstatus();
2443 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2444 t1 = tcg_const_i32(1 << sh);
2445 gen_helper_store_fpscr(t0, t1);
2446 tcg_temp_free_i64(t0);
2447 tcg_temp_free_i32(t1);
2448 if (unlikely(Rc(ctx->opcode) != 0)) {
2449 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2450 }
2451 /* We can raise a deferred exception */
2452 gen_helper_float_check_status();
2453 }
2454
2455 /*** Addressing modes ***/
2456 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2457 static always_inline void gen_addr_imm_index (TCGv EA,
2458 DisasContext *ctx,
2459 target_long maskl)
2460 {
2461 target_long simm = SIMM(ctx->opcode);
2462
2463 simm &= ~maskl;
2464 if (rA(ctx->opcode) == 0)
2465 tcg_gen_movi_tl(EA, simm);
2466 else if (likely(simm != 0))
2467 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2468 else
2469 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2470 }
2471
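/* Register indirect with index register: EA = (rA|0) + rB */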
2472 static always_inline void gen_addr_reg_index (TCGv EA,
2473 DisasContext *ctx)
2474 {
2475 if (rA(ctx->opcode) == 0)
2476 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2477 else
2478 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2479 }
2480
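/* Register indirect: EA = (rA|0) (used by lswi/stswi below) */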
2481 static always_inline void gen_addr_register (TCGv EA,
2482 DisasContext *ctx)
2483 {
2484 if (rA(ctx->opcode) == 0)
2485 tcg_gen_movi_tl(EA, 0);
2486 else
2487 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2488 }
2489
2490 static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask)
2491 {
2492 int l1 = gen_new_label();
2493 TCGv t0 = tcg_temp_new();
2494 TCGv_i32 t1, t2;
2495 /* NIP cannot be restored if the memory exception comes from a helper */
2496 gen_update_nip(ctx, ctx->nip - 4);
2497 tcg_gen_andi_tl(t0, EA, mask);
2498 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2499 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2500 t2 = tcg_const_i32(0);
2501 gen_helper_raise_exception_err(t1, t2);
2502 tcg_temp_free_i32(t1);
2503 tcg_temp_free_i32(t2);
2504 gen_set_label(l1);
2505 tcg_temp_free(t0);
2506 }
2507
2508 /*** Integer load ***/
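/* The gen_qemu_ld / gen_qemu_st wrappers below take the "flags" value that
 * the translator passes as ctx->mem_idx.  As used here, bit 0 requests a
 * byte-swapped (little-endian) access; on 64-bit targets bit 1 is set in
 * 64-bit address mode (otherwise the EA is first zero-extended to 32 bits)
 * and the remaining bits are the softmmu memory index, while on 32-bit
 * targets the memory index simply starts at bit 1.
 */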
2509 #if defined(TARGET_PPC64)
2510 #define GEN_QEMU_LD_PPC64(width) \
2511 static always_inline void gen_qemu_ld##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2512 { \
2513 if (likely(flags & 2)) \
2514 tcg_gen_qemu_ld##width(t0, t1, flags >> 2); \
2515 else { \
2516 TCGv addr = tcg_temp_new(); \
2517 tcg_gen_ext32u_tl(addr, t1); \
2518 tcg_gen_qemu_ld##width(t0, addr, flags >> 2); \
2519 tcg_temp_free(addr); \
2520 } \
2521 }
2522 GEN_QEMU_LD_PPC64(8u)
2523 GEN_QEMU_LD_PPC64(8s)
2524 GEN_QEMU_LD_PPC64(16u)
2525 GEN_QEMU_LD_PPC64(16s)
2526 GEN_QEMU_LD_PPC64(32u)
2527 GEN_QEMU_LD_PPC64(32s)
2528 GEN_QEMU_LD_PPC64(64)
2529
2530 #define GEN_QEMU_ST_PPC64(width) \
2531 static always_inline void gen_qemu_st##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2532 { \
2533 if (likely(flags & 2)) \
2534 tcg_gen_qemu_st##width(t0, t1, flags >> 2); \
2535 else { \
2536 TCGv addr = tcg_temp_new(); \
2537 tcg_gen_ext32u_tl(addr, t1); \
2538 tcg_gen_qemu_st##width(t0, addr, flags >> 2); \
2539 tcg_temp_free(addr); \
2540 } \
2541 }
2542 GEN_QEMU_ST_PPC64(8)
2543 GEN_QEMU_ST_PPC64(16)
2544 GEN_QEMU_ST_PPC64(32)
2545 GEN_QEMU_ST_PPC64(64)
2546
2547 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2548 {
2549 gen_qemu_ld8u_ppc64(arg0, arg1, flags);
2550 }
2551
2552 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2553 {
2554 gen_qemu_ld8s_ppc64(arg0, arg1, flags);
2555 }
2556
2557 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2558 {
2559 if (unlikely(flags & 1)) {
2560 TCGv_i32 t0;
2561 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2562 t0 = tcg_temp_new_i32();
2563 tcg_gen_trunc_tl_i32(t0, arg0);
2564 tcg_gen_bswap16_i32(t0, t0);
2565 tcg_gen_extu_i32_tl(arg0, t0);
2566 tcg_temp_free_i32(t0);
2567 } else
2568 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2569 }
2570
2571 static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
2572 {
2573 if (unlikely(flags & 1)) {
2574 TCGv_i32 t0;
2575 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2576 t0 = tcg_temp_new_i32();
2577 tcg_gen_trunc_tl_i32(t0, arg0);
2578 tcg_gen_bswap16_i32(t0, t0);
2579 tcg_gen_extu_i32_tl(arg0, t0);
2580 tcg_gen_ext16s_tl(arg0, arg0);
2581 tcg_temp_free_i32(t0);
2582 } else
2583 gen_qemu_ld16s_ppc64(arg0, arg1, flags);
2584 }
2585
2586 static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
2587 {
2588 if (unlikely(flags & 1)) {
2589 TCGv_i32 t0;
2590 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2591 t0 = tcg_temp_new_i32();
2592 tcg_gen_trunc_tl_i32(t0, arg0);
2593 tcg_gen_bswap_i32(t0, t0);
2594 tcg_gen_extu_i32_tl(arg0, t0);
2595 tcg_temp_free_i32(t0);
2596 } else
2597 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2598 }
2599
2600 static always_inline void gen_qemu_ld32s(TCGv arg0, TCGv arg1, int flags)
2601 {
2602 if (unlikely(flags & 1)) {
2603 TCGv_i32 t0;
2604 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2605 t0 = tcg_temp_new_i32();
2606 tcg_gen_trunc_tl_i32(t0, arg0);
2607 tcg_gen_bswap_i32(t0, t0);
2608 tcg_gen_ext_i32_tl(arg0, t0);
2609 tcg_temp_free_i32(t0);
2610 } else
2611 gen_qemu_ld32s_ppc64(arg0, arg1, flags);
2612 }
2613
2614 static always_inline void gen_qemu_ld64(TCGv arg0, TCGv arg1, int flags)
2615 {
2616 gen_qemu_ld64_ppc64(arg0, arg1, flags);
2617 if (unlikely(flags & 1))
2618 tcg_gen_bswap_i64(arg0, arg0);
2619 }
2620
2621 static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2622 {
2623 gen_qemu_st8_ppc64(arg0, arg1, flags);
2624 }
2625
2626 static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
2627 {
2628 if (unlikely(flags & 1)) {
2629 TCGv_i32 t0;
2630 TCGv_i64 t1;
2631 t0 = tcg_temp_new_i32();
2632 tcg_gen_trunc_tl_i32(t0, arg0);
2633 tcg_gen_ext16u_i32(t0, t0);
2634 tcg_gen_bswap16_i32(t0, t0);
2635 t1 = tcg_temp_new_i64();
2636 tcg_gen_extu_i32_tl(t1, t0);
2637 tcg_temp_free_i32(t0);
2638 gen_qemu_st16_ppc64(t1, arg1, flags);
2639 tcg_temp_free_i64(t1);
2640 } else
2641 gen_qemu_st16_ppc64(arg0, arg1, flags);
2642 }
2643
2644 static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
2645 {
2646 if (unlikely(flags & 1)) {
2647 TCGv_i32 t0;
2648 TCGv_i64 t1;
2649 t0 = tcg_temp_new_i32();
2650 tcg_gen_trunc_tl_i32(t0, arg0);
2651 tcg_gen_bswap_i32(t0, t0);
2652 t1 = tcg_temp_new_i64();
2653 tcg_gen_extu_i32_tl(t1, t0);
2654 tcg_temp_free_i32(t0);
2655 gen_qemu_st32_ppc64(t1, arg1, flags);
2656 tcg_temp_free_i64(t1);
2657 } else
2658 gen_qemu_st32_ppc64(arg0, arg1, flags);
2659 }
2660
2661 static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
2662 {
2663 if (unlikely(flags & 1)) {
2664 TCGv_i64 t0 = tcg_temp_new_i64();
2665 tcg_gen_bswap_i64(t0, arg0);
2666 gen_qemu_st64_ppc64(t0, arg1, flags);
2667 tcg_temp_free_i64(t0);
2668 } else
2669 gen_qemu_st64_ppc64(arg0, arg1, flags);
2670 }
2671
2672
2673 #else /* defined(TARGET_PPC64) */
2674 #define GEN_QEMU_LD_PPC32(width) \
2675 static always_inline void gen_qemu_ld##width##_ppc32(TCGv arg0, TCGv arg1, int flags) \
2676 { \
2677 tcg_gen_qemu_ld##width(arg0, arg1, flags >> 1); \
2678 }
2679 GEN_QEMU_LD_PPC32(8u)
2680 GEN_QEMU_LD_PPC32(8s)
2681 GEN_QEMU_LD_PPC32(16u)
2682 GEN_QEMU_LD_PPC32(16s)
2683 GEN_QEMU_LD_PPC32(32u)
2684 GEN_QEMU_LD_PPC32(32s)
2685 static always_inline void gen_qemu_ld64_ppc32(TCGv_i64 arg0, TCGv arg1, int flags)
2686 {
2687 tcg_gen_qemu_ld64(arg0, arg1, flags >> 1);
2688 }
2689
2690 #define GEN_QEMU_ST_PPC32(width) \
2691 static always_inline void gen_qemu_st##width##_ppc32(TCGv arg0, TCGv arg1, int flags) \
2692 { \
2693 tcg_gen_qemu_st##width(arg0, arg1, flags >> 1); \
2694 }
2695 GEN_QEMU_ST_PPC32(8)
2696 GEN_QEMU_ST_PPC32(16)
2697 GEN_QEMU_ST_PPC32(32)
2698 static always_inline void gen_qemu_st64_ppc32(TCGv_i64 arg0, TCGv arg1, int flags)
2699 {
2700 tcg_gen_qemu_st64(arg0, arg1, flags >> 1);
2701 }
2702
2703 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2704 {
2705 gen_qemu_ld8u_ppc32(arg0, arg1, flags >> 1);
2706 }
2707
2708 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2709 {
2710 gen_qemu_ld8s_ppc32(arg0, arg1, flags >> 1);
2711 }
2712
2713 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2714 {
2715 gen_qemu_ld16u_ppc32(arg0, arg1, flags >> 1);
2716 if (unlikely(flags & 1))
2717 tcg_gen_bswap16_i32(arg0, arg0);
2718 }
2719
2720 static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
2721 {
2722 if (unlikely(flags & 1)) {
2723 gen_qemu_ld16u_ppc32(arg0, arg1, flags);
2724 tcg_gen_bswap16_i32(arg0, arg0);
2725 tcg_gen_ext16s_i32(arg0, arg0);
2726 } else
2727 gen_qemu_ld16s_ppc32(arg0, arg1, flags);
2728 }
2729
2730 static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
2731 {
2732 gen_qemu_ld32u_ppc32(arg0, arg1, flags);
2733 if (unlikely(flags & 1))
2734 tcg_gen_bswap_i32(arg0, arg0);
2735 }
2736
2737 static always_inline void gen_qemu_ld64(TCGv_i64 arg0, TCGv arg1, int flags)
2738 {
2739 gen_qemu_ld64_ppc32(arg0, arg1, flags);
2740 if (unlikely(flags & 1))
2741 tcg_gen_bswap_i64(arg0, arg0);
2742 }
2743
2744 static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2745 {
2746 gen_qemu_st8_ppc32(arg0, arg1, flags);
2747 }
2748
2749 static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
2750 {
2751 if (unlikely(flags & 1)) {
2752 TCGv_i32 temp = tcg_temp_new_i32();
2753 tcg_gen_ext16u_i32(temp, arg0);
2754 tcg_gen_bswap16_i32(temp, temp);
2755 gen_qemu_st16_ppc32(temp, arg1, flags);
2756 tcg_temp_free_i32(temp);
2757 } else
2758 gen_qemu_st16_ppc32(arg0, arg1, flags);
2759 }
2760
2761 static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
2762 {
2763 if (unlikely(flags & 1)) {
2764 TCGv_i32 temp = tcg_temp_new_i32();
2765 tcg_gen_bswap_i32(temp, arg0);
2766 gen_qemu_st32_ppc32(temp, arg1, flags);
2767 tcg_temp_free_i32(temp);
2768 } else
2769 gen_qemu_st32_ppc32(arg0, arg1, flags);
2770 }
2771
2772 static always_inline void gen_qemu_st64(TCGv_i64 arg0, TCGv arg1, int flags)
2773 {
2774 if (unlikely(flags & 1)) {
2775 TCGv_i64 temp = tcg_temp_new_i64();
2776 tcg_gen_bswap_i64(temp, arg0);
2777 gen_qemu_st64_ppc32(temp, arg1, flags);
2778 tcg_temp_free_i64(temp);
2779 } else
2780 gen_qemu_st64_ppc32(arg0, arg1, flags);
2781 }
2782 #endif
2783
2784 #define GEN_LD(name, ldop, opc, type) \
2785 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
2786 { \
2787 TCGv EA = tcg_temp_new(); \
2788 gen_set_access_type(ACCESS_INT); \
2789 gen_addr_imm_index(EA, ctx, 0); \
2790 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2791 tcg_temp_free(EA); \
2792 }
2793
2794 #define GEN_LDU(name, ldop, opc, type) \
2795 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2796 { \
2797 TCGv EA; \
2798 if (unlikely(rA(ctx->opcode) == 0 || \
2799 rA(ctx->opcode) == rD(ctx->opcode))) { \
2800 GEN_EXCP_INVAL(ctx); \
2801 return; \
2802 } \
2803 EA = tcg_temp_new(); \
2804 gen_set_access_type(ACCESS_INT); \
2805 if (type == PPC_64B) \
2806 gen_addr_imm_index(EA, ctx, 0x03); \
2807 else \
2808 gen_addr_imm_index(EA, ctx, 0); \
2809 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2810 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2811 tcg_temp_free(EA); \
2812 }
2813
2814 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2815 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2816 { \
2817 TCGv EA; \
2818 if (unlikely(rA(ctx->opcode) == 0 || \
2819 rA(ctx->opcode) == rD(ctx->opcode))) { \
2820 GEN_EXCP_INVAL(ctx); \
2821 return; \
2822 } \
2823 EA = tcg_temp_new(); \
2824 gen_set_access_type(ACCESS_INT); \
2825 gen_addr_reg_index(EA, ctx); \
2826 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2827 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2828 tcg_temp_free(EA); \
2829 }
2830
2831 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2832 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
2833 { \
2834 TCGv EA = tcg_temp_new(); \
2835 gen_set_access_type(ACCESS_INT); \
2836 gen_addr_reg_index(EA, ctx); \
2837 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2838 tcg_temp_free(EA); \
2839 }
2840
2841 #define GEN_LDS(name, ldop, op, type) \
2842 GEN_LD(name, ldop, op | 0x20, type); \
2843 GEN_LDU(name, ldop, op | 0x21, type); \
2844 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2845 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2846
2847 /* lbz lbzu lbzux lbzx */
2848 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2849 /* lha lhau lhaux lhax */
2850 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2851 /* lhz lhzu lhzux lhzx */
2852 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2853 /* lwz lwzu lwzux lwzx */
2854 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2855 #if defined(TARGET_PPC64)
2856 /* lwaux */
2857 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2858 /* lwax */
2859 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2860 /* ldux */
2861 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2862 /* ldx */
2863 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2864 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2865 {
2866 TCGv EA;
2867 if (Rc(ctx->opcode)) {
2868 if (unlikely(rA(ctx->opcode) == 0 ||
2869 rA(ctx->opcode) == rD(ctx->opcode))) {
2870 GEN_EXCP_INVAL(ctx);
2871 return;
2872 }
2873 }
2874 EA = tcg_temp_new();
2875 gen_set_access_type(ACCESS_INT);
2876 gen_addr_imm_index(EA, ctx, 0x03);
2877 if (ctx->opcode & 0x02) {
2878 /* lwa (lwau is undefined) */
2879 gen_qemu_ld32s(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2880 } else {
2881 /* ld - ldu */
2882 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2883 }
2884 if (Rc(ctx->opcode))
2885 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2886 tcg_temp_free(EA);
2887 }
2888 /* lq */
2889 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2890 {
2891 #if defined(CONFIG_USER_ONLY)
2892 GEN_EXCP_PRIVOPC(ctx);
2893 #else
2894 int ra, rd;
2895 TCGv EA;
2896
2897 /* lq is a privileged instruction in this implementation */
2898 if (unlikely(ctx->supervisor == 0)) {
2899 GEN_EXCP_PRIVOPC(ctx);
2900 return;
2901 }
2902 ra = rA(ctx->opcode);
2903 rd = rD(ctx->opcode);
2904 if (unlikely((rd & 1) || rd == ra)) {
2905 GEN_EXCP_INVAL(ctx);
2906 return;
2907 }
2908 if (unlikely(ctx->mem_idx & 1)) {
2909 /* Little-endian mode is not handled */
2910 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2911 return;
2912 }
2913 EA = tcg_temp_new();
2914 gen_set_access_type(ACCESS_INT);
2915 gen_addr_imm_index(EA, ctx, 0x0F);
2916 gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
2917 tcg_gen_addi_tl(EA, EA, 8);
2918 gen_qemu_ld64(cpu_gpr[rd+1], EA, ctx->mem_idx);
2919 tcg_temp_free(EA);
2920 #endif
2921 }
2922 #endif
2923
2924 /*** Integer store ***/
2925 #define GEN_ST(name, stop, opc, type) \
2926 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
2927 { \
2928 TCGv EA = tcg_temp_new(); \
2929 gen_set_access_type(ACCESS_INT); \
2930 gen_addr_imm_index(EA, ctx, 0); \
2931 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2932 tcg_temp_free(EA); \
2933 }
2934
2935 #define GEN_STU(name, stop, opc, type) \
2936 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type)                       \
2937 { \
2938 TCGv EA; \
2939 if (unlikely(rA(ctx->opcode) == 0)) { \
2940 GEN_EXCP_INVAL(ctx); \
2941 return; \
2942 } \
2943 EA = tcg_temp_new(); \
2944 gen_set_access_type(ACCESS_INT); \
2945 if (type == PPC_64B) \
2946 gen_addr_imm_index(EA, ctx, 0x03); \
2947 else \
2948 gen_addr_imm_index(EA, ctx, 0); \
2949 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2950 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2951 tcg_temp_free(EA); \
2952 }
2953
2954 #define GEN_STUX(name, stop, opc2, opc3, type) \
2955 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2956 { \
2957 TCGv EA; \
2958 if (unlikely(rA(ctx->opcode) == 0)) { \
2959 GEN_EXCP_INVAL(ctx); \
2960 return; \
2961 } \
2962 EA = tcg_temp_new(); \
2963 gen_set_access_type(ACCESS_INT); \
2964 gen_addr_reg_index(EA, ctx); \
2965 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2966 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2967 tcg_temp_free(EA); \
2968 }
2969
2970 #define GEN_STX(name, stop, opc2, opc3, type) \
2971 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
2972 { \
2973 TCGv EA = tcg_temp_new(); \
2974 gen_set_access_type(ACCESS_INT); \
2975 gen_addr_reg_index(EA, ctx); \
2976 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2977 tcg_temp_free(EA); \
2978 }
2979
2980 #define GEN_STS(name, stop, op, type) \
2981 GEN_ST(name, stop, op | 0x20, type); \
2982 GEN_STU(name, stop, op | 0x21, type); \
2983 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2984 GEN_STX(name, stop, 0x17, op | 0x00, type)
2985
2986 /* stb stbu stbux stbx */
2987 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2988 /* sth sthu sthux sthx */
2989 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2990 /* stw stwu stwux stwx */
2991 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2992 #if defined(TARGET_PPC64)
2993 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2994 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2995 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
2996 {
2997 int rs;
2998 TCGv EA;
2999
3000 rs = rS(ctx->opcode);
3001 if ((ctx->opcode & 0x3) == 0x2) {
3002 #if defined(CONFIG_USER_ONLY)
3003 GEN_EXCP_PRIVOPC(ctx);
3004 #else
3005 /* stq */
3006 if (unlikely(ctx->supervisor == 0)) {
3007 GEN_EXCP_PRIVOPC(ctx);
3008 return;
3009 }
3010 if (unlikely(rs & 1)) {
3011 GEN_EXCP_INVAL(ctx);
3012 return;
3013 }
3014 if (unlikely(ctx->mem_idx & 1)) {
3015 /* Little-endian mode is not handled */
3016 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
3017 return;
3018 }
3019 EA = tcg_temp_new();
3020 gen_set_access_type(ACCESS_INT);
3021 gen_addr_imm_index(EA, ctx, 0x03);
3022 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3023 tcg_gen_addi_tl(EA, EA, 8);
3024 gen_qemu_st64(cpu_gpr[rs+1], EA, ctx->mem_idx);
3025 tcg_temp_free(EA);
3026 #endif
3027 } else {
3028 /* std / stdu */
3029 if (Rc(ctx->opcode)) {
3030 if (unlikely(rA(ctx->opcode) == 0)) {
3031 GEN_EXCP_INVAL(ctx);
3032 return;
3033 }
3034 }
3035 EA = tcg_temp_new();
3036 gen_set_access_type(ACCESS_INT);
3037 gen_addr_imm_index(EA, ctx, 0x03);
3038 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3039 if (Rc(ctx->opcode))
3040 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3041 tcg_temp_free(EA);
3042 }
3043 }
3044 #endif
3045 /*** Integer load and store with byte reverse ***/
3046 /* lhbrx */
3047 static always_inline void gen_qemu_ld16ur (TCGv t0, TCGv t1, int flags)
3048 {
3049 TCGv_i32 temp = tcg_temp_new_i32();
3050 gen_qemu_ld16u(t0, t1, flags);
3051 tcg_gen_trunc_tl_i32(temp, t0);
3052 tcg_gen_bswap16_i32(temp, temp);
3053 tcg_gen_extu_i32_tl(t0, temp);
3054 tcg_temp_free_i32(temp);
3055 }
3056 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3057
3058 /* lwbrx */
3059 static always_inline void gen_qemu_ld32ur (TCGv t0, TCGv t1, int flags)
3060 {
3061 TCGv_i32 temp = tcg_temp_new_i32();
3062 gen_qemu_ld32u(t0, t1, flags);
3063 tcg_gen_trunc_tl_i32(temp, t0);
3064 tcg_gen_bswap_i32(temp, temp);
3065 tcg_gen_extu_i32_tl(t0, temp);
3066 tcg_temp_free_i32(temp);
3067 }
3068 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3069
3070 /* sthbrx */
3071 static always_inline void gen_qemu_st16r (TCGv t0, TCGv t1, int flags)
3072 {
3073 TCGv_i32 temp = tcg_temp_new_i32();
3074 TCGv t2 = tcg_temp_new();
3075 tcg_gen_trunc_tl_i32(temp, t0);
3076 tcg_gen_ext16u_i32(temp, temp);
3077 tcg_gen_bswap16_i32(temp, temp);
3078 tcg_gen_extu_i32_tl(t2, temp);
3079 tcg_temp_free_i32(temp);
3080 gen_qemu_st16(t2, t1, flags);
3081 tcg_temp_free(t2);
3082 }
3083 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3084
3085 /* stwbrx */
3086 static always_inline void gen_qemu_st32r (TCGv t0, TCGv t1, int flags)
3087 {
3088 TCGv_i32 temp = tcg_temp_new_i32();
3089 TCGv t2 = tcg_temp_new();
3090 tcg_gen_trunc_tl_i32(temp, t0);
3091 tcg_gen_bswap_i32(temp, temp);
3092 tcg_gen_extu_i32_tl(t2, temp);
3093 tcg_temp_free_i32(temp);
3094 gen_qemu_st32(t2, t1, flags);
3095 tcg_temp_free(t2);
3096 }
3097 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3098
3099 /*** Integer load and store multiple ***/
3100 /* lmw */
3101 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
3102 {
3103 TCGv t0 = tcg_temp_new();
3104 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
3105 /* NIP cannot be restored if the memory exception comes from a helper */
3106 gen_update_nip(ctx, ctx->nip - 4);
3107 gen_addr_imm_index(t0, ctx, 0);
3108 gen_helper_lmw(t0, t1);
3109 tcg_temp_free(t0);
3110 tcg_temp_free_i32(t1);
3111 }
3112
3113 /* stmw */
3114 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
3115 {
3116 TCGv t0 = tcg_temp_new();
3117 TCGv_i32 t1 = tcg_const_i32(rS(ctx->opcode));
3118 /* NIP cannot be restored if the memory exception comes from a helper */
3119 gen_update_nip(ctx, ctx->nip - 4);
3120 gen_addr_imm_index(t0, ctx, 0);
3121 gen_helper_stmw(t0, t1);
3122 tcg_temp_free(t0);
3123 tcg_temp_free_i32(t1);
3124 }
3125
3126 /*** Integer load and store strings ***/
3127 /* lswi */
3128 /* The PowerPC32 specification says we must generate an exception if
3129  * rA is in the range of registers to be loaded.
3130  * On the other hand, IBM says this is valid, but rA won't be loaded.
3131  * For now, I'll follow the spec...
3132 */
3133 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
3134 {
3135 TCGv t0;
3136 TCGv_i32 t1, t2;
3137 int nb = NB(ctx->opcode);
3138 int start = rD(ctx->opcode);
3139 int ra = rA(ctx->opcode);
3140 int nr;
3141
3142 if (nb == 0)
3143 nb = 32;
3144 nr = nb / 4;
3145 if (unlikely(((start + nr) > 32 &&
3146 start <= ra && (start + nr - 32) > ra) ||
3147 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
3148 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3149 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
3150 return;
3151 }
3152 /* NIP cannot be restored if the memory exception comes from a helper */
3153 gen_update_nip(ctx, ctx->nip - 4);
3154 t0 = tcg_temp_new();
3155 gen_addr_register(t0, ctx);
3156 t1 = tcg_const_i32(nb);
3157 t2 = tcg_const_i32(start);
3158 gen_helper_lsw(t0, t1, t2);
3159 tcg_temp_free(t0);
3160 tcg_temp_free_i32(t1);
3161 tcg_temp_free_i32(t2);
3162 }
3163
3164 /* lswx */
3165 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
3166 {
3167 TCGv t0 = tcg_temp_new();
3168 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
3169 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
3170 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
3171 /* NIP cannot be restored if the memory exception comes from a helper */
3172 gen_update_nip(ctx, ctx->nip - 4);
3173 gen_addr_reg_index(t0, ctx);
3174 gen_helper_lswx(t0, t1, t2, t3);
3175 tcg_temp_free(t0);
3176 tcg_temp_free_i32(t1);
3177 tcg_temp_free_i32(t2);
3178 tcg_temp_free_i32(t3);
3179 }
3180
3181 /* stswi */
3182 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
3183 {
3184 int nb = NB(ctx->opcode);
3185 TCGv t0 = tcg_temp_new();
3186 TCGv_i32 t1;
3187 TCGv_i32 t2 = tcg_const_i32(rS(ctx->opcode));
3188 /* NIP cannot be restored if the memory exception comes from a helper */
3189 gen_update_nip(ctx, ctx->nip - 4);
3190 gen_addr_register(t0, ctx);
3191 if (nb == 0)
3192 nb = 32;
3193 t1 = tcg_const_i32(nb);
3194 gen_helper_stsw(t0, t1, t2);
3195 tcg_temp_free(t0);
3196 tcg_temp_free_i32(t1);
3197 tcg_temp_free_i32(t2);
3198 }
3199
3200 /* stswx */
3201 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
3202 {
3203 TCGv t0 = tcg_temp_new();
3204 TCGv_i32 t1 = tcg_temp_new_i32();
3205 TCGv_i32 t2 = tcg_const_i32(rS(ctx->opcode));
3206 /* NIP cannot be restored if the memory exception comes from a helper */
3207 gen_update_nip(ctx, ctx->nip - 4);
3208 gen_addr_reg_index(t0, ctx);
3209 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3210 tcg_gen_andi_i32(t1, t1, 0x7F);
3211 gen_helper_stsw(t0, t1, t2);
3212 tcg_temp_free(t0);
3213 tcg_temp_free_i32(t1);
3214 tcg_temp_free_i32(t2);
3215 }
3216
3217 /*** Memory synchronisation ***/
3218 /* eieio */
3219 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
3220 {
3221 }
3222
3223 /* isync */
3224 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
3225 {
3226 GEN_STOP(ctx);
3227 }
3228
3229 /* lwarx */
3230 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
3231 {
3232 TCGv t0 = tcg_temp_local_new();
3233 gen_set_access_type(ACCESS_RES);
3234 gen_addr_reg_index(t0, ctx);
3235 gen_check_align(ctx, t0, 0x03);
3236 #if defined(TARGET_PPC64)
3237 if (!ctx->sf_mode)
3238 tcg_gen_ext32u_tl(t0, t0);
3239 #endif
3240 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
3241 tcg_gen_mov_tl(cpu_reserve, t0);
3242 tcg_temp_free(t0);
3243 }
3244
3245 /* stwcx. */
3246 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
3247 {
3248 int l1 = gen_new_label();
3249 TCGv t0 = tcg_temp_local_new();
3250 gen_set_access_type(ACCESS_RES);
3251 gen_addr_reg_index(t0, ctx);
3252 gen_check_align(ctx, t0, 0x03);
3253 #if defined(TARGET_PPC64)
3254 if (!ctx->sf_mode)
3255 tcg_gen_ext32u_tl(t0, t0);
3256 #endif
3257 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3258 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3259 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3260 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3261 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3262 gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
3263 gen_set_label(l1);
3264 tcg_gen_movi_tl(cpu_reserve, -1);
3265 tcg_temp_free(t0);
3266 }
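/* Note on the reservation model used above: lwarx/ldarx remember the
 * (alignment-checked) effective address in cpu_reserve, and stwcx./stdcx.
 * only perform the store and set CR0[EQ] when the address still matches
 * cpu_reserve; in either case the reservation is then cleared by writing -1.
 * This is a simplified model: it does not watch for stores from other CPUs
 * to the reserved granule.
 */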
3267
3268 #if defined(TARGET_PPC64)
3269 /* ldarx */
3270 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
3271 {
3272 TCGv t0 = tcg_temp_local_new();
3273 gen_set_access_type(ACCESS_RES);
3274 gen_addr_reg_index(t0, ctx);
3275 gen_check_align(ctx, t0, 0x07);
3276 if (!ctx->sf_mode)
3277 tcg_gen_ext32u_tl(t0, t0);
3278 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
3279 tcg_gen_mov_tl(cpu_reserve, t0);
3280 tcg_temp_free(t0);
3281 }
3282
3283 /* stdcx. */
3284 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
3285 {
3286 int l1 = gen_new_label();
3287 TCGv t0 = tcg_temp_local_new();
3288 gen_set_access_type(ACCESS_RES);
3289 gen_addr_reg_index(t0, ctx);
3290 gen_check_align(ctx, t0, 0x07);
3291 if (!ctx->sf_mode)
3292 tcg_gen_ext32u_tl(t0, t0);
3293 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3294 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3295 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3296 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3297 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3298 gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
3299 gen_set_label(l1);
3300 tcg_gen_movi_tl(cpu_reserve, -1);
3301 tcg_temp_free(t0);
3302 }
3303 #endif /* defined(TARGET_PPC64) */
3304
3305 /* sync */
3306 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
3307 {
3308 }
3309
3310 /* wait */
3311 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
3312 {
3313 TCGv_i32 t0 = tcg_const_i32(1);
3314 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3315 tcg_temp_free_i32(t0);
3316 /* Stop translation, as the CPU is supposed to sleep from now on */
3317 GEN_EXCP(ctx, EXCP_HLT, 1);
3318 }
3319
3320 /*** Floating-point load ***/
3321 #define GEN_LDF(name, ldop, opc, type) \
3322 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
3323 { \
3324 TCGv EA; \
3325 if (unlikely(!ctx->fpu_enabled)) { \
3326 GEN_EXCP_NO_FP(ctx); \
3327 return; \
3328 } \
3329 gen_set_access_type(ACCESS_FLOAT); \
3330 EA = tcg_temp_new(); \
3331 gen_addr_imm_index(EA, ctx, 0); \
3332 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3333 tcg_temp_free(EA); \
3334 }
3335
3336 #define GEN_LDUF(name, ldop, opc, type) \
3337 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3338 { \
3339 TCGv EA; \
3340 if (unlikely(!ctx->fpu_enabled)) { \
3341 GEN_EXCP_NO_FP(ctx); \
3342 return; \
3343 } \
3344 if (unlikely(rA(ctx->opcode) == 0)) { \
3345 GEN_EXCP_INVAL(ctx); \
3346 return; \
3347 } \
3348 gen_set_access_type(ACCESS_FLOAT); \
3349 EA = tcg_temp_new(); \
3350 gen_addr_imm_index(EA, ctx, 0); \
3351 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3352 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3353 tcg_temp_free(EA); \
3354 }
3355
3356 #define GEN_LDUXF(name, ldop, opc, type) \
3357 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3358 { \
3359 TCGv EA; \
3360 if (unlikely(!ctx->fpu_enabled)) { \
3361 GEN_EXCP_NO_FP(ctx); \
3362 return; \
3363 } \
3364 if (unlikely(rA(ctx->opcode) == 0)) { \
3365 GEN_EXCP_INVAL(ctx); \
3366 return; \
3367 } \
3368 gen_set_access_type(ACCESS_FLOAT); \
3369 EA = tcg_temp_new(); \
3370 gen_addr_reg_index(EA, ctx); \
3371 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3372 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3373 tcg_temp_free(EA); \
3374 }
3375
3376 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3377 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
3378 { \
3379 TCGv EA; \
3380 if (unlikely(!ctx->fpu_enabled)) { \
3381 GEN_EXCP_NO_FP(ctx); \
3382 return; \
3383 } \
3384 gen_set_access_type(ACCESS_FLOAT); \
3385 EA = tcg_temp_new(); \
3386 gen_addr_reg_index(EA, ctx); \
3387 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3388 tcg_temp_free(EA); \
3389 }
3390
3391 #define GEN_LDFS(name, ldop, op, type) \
3392 GEN_LDF(name, ldop, op | 0x20, type); \
3393 GEN_LDUF(name, ldop, op | 0x21, type); \
3394 GEN_LDUXF(name, ldop, op | 0x01, type); \
3395 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3396
3397 static always_inline void gen_qemu_ld32fs(TCGv_i64 arg1, TCGv arg2, int flags)
3398 {
3399 TCGv t0 = tcg_temp_new();
3400 TCGv_i32 t1 = tcg_temp_new_i32();
3401 gen_qemu_ld32u(t0, arg2, flags);
3402 tcg_gen_trunc_tl_i32(t1, t0);
3403 tcg_temp_free(t0);
3404 gen_helper_float32_to_float64(arg1, t1);
3405 tcg_temp_free_i32(t1);
3406 }
3407
3408 /* lfd lfdu lfdux lfdx */
3409 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3410 /* lfs lfsu lfsux lfsx */
3411 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3412
3413 /*** Floating-point store ***/
3414 #define GEN_STF(name, stop, opc, type) \
3415 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
3416 { \
3417 TCGv EA; \
3418 if (unlikely(!ctx->fpu_enabled)) { \
3419 GEN_EXCP_NO_FP(ctx); \
3420 return; \
3421 } \
3422 gen_set_access_type(ACCESS_FLOAT); \
3423 EA = tcg_temp_new(); \
3424 gen_addr_imm_index(EA, ctx, 0); \
3425 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3426 tcg_temp_free(EA); \
3427 }
3428
3429 #define GEN_STUF(name, stop, opc, type) \
3430 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3431 { \
3432 TCGv EA; \
3433 if (unlikely(!ctx->fpu_enabled)) { \
3434 GEN_EXCP_NO_FP(ctx); \
3435 return; \
3436 } \
3437 if (unlikely(rA(ctx->opcode) == 0)) { \
3438 GEN_EXCP_INVAL(ctx); \
3439 return; \
3440 } \
3441 gen_set_access_type(ACCESS_FLOAT); \
3442 EA = tcg_temp_new(); \
3443 gen_addr_imm_index(EA, ctx, 0); \
3444 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3445 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3446 tcg_temp_free(EA); \
3447 }
3448
3449 #define GEN_STUXF(name, stop, opc, type) \
3450 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3451 { \
3452 TCGv EA; \
3453 if (unlikely(!ctx->fpu_enabled)) { \
3454 GEN_EXCP_NO_FP(ctx); \
3455 return; \
3456 } \
3457 if (unlikely(rA(ctx->opcode) == 0)) { \
3458 GEN_EXCP_INVAL(ctx); \
3459 return; \
3460 } \
3461 gen_set_access_type(ACCESS_FLOAT); \
3462 EA = tcg_temp_new(); \
3463 gen_addr_reg_index(EA, ctx); \
3464 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3465 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3466 tcg_temp_free(EA); \
3467 }
3468
3469 #define GEN_STXF(name, stop, opc2, opc3, type) \
3470 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
3471 { \
3472 TCGv EA; \
3473 if (unlikely(!ctx->fpu_enabled)) { \
3474 GEN_EXCP_NO_FP(ctx); \
3475 return; \
3476 } \
3477 gen_set_access_type(ACCESS_FLOAT); \
3478 EA = tcg_temp_new(); \
3479 gen_addr_reg_index(EA, ctx); \
3480 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3481 tcg_temp_free(EA); \
3482 }
3483
3484 #define GEN_STFS(name, stop, op, type) \
3485 GEN_STF(name, stop, op | 0x20, type); \
3486 GEN_STUF(name, stop, op | 0x21, type); \
3487 GEN_STUXF(name, stop, op | 0x01, type); \
3488 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3489
3490 static always_inline void gen_qemu_st32fs(TCGv_i64 arg1, TCGv arg2, int flags)
3491 {
3492 TCGv_i32 t0 = tcg_temp_new_i32();
3493 TCGv t1 = tcg_temp_new();
3494 gen_helper_float64_to_float32(t0, arg1);
3495 tcg_gen_extu_i32_tl(t1, t0);
3496 tcg_temp_free_i32(t0);
3497 gen_qemu_st32(t1, arg2, flags);
3498 tcg_temp_free(t1);
3499 }
3500
3501 /* stfd stfdu stfdux stfdx */
3502 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3503 /* stfs stfsu stfsux stfsx */
3504 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3505
3506 /* Optional: */
3507 static always_inline void gen_qemu_st32fiw(TCGv_i64 arg1, TCGv arg2, int flags)
3508 {
3509 TCGv t0 = tcg_temp_new();
3510 tcg_gen_trunc_i64_tl(t0, arg1),
3511 gen_qemu_st32(t0, arg2, flags);
3512 tcg_temp_free(t0);
3513 }
3514 /* stfiwx */
3515 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3516
3517 /*** Branch ***/
3518 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
3519 target_ulong dest)
3520 {
3521 TranslationBlock *tb;
3522 tb = ctx->tb;
3523 #if defined(TARGET_PPC64)
3524 if (!ctx->sf_mode)
3525 dest = (uint32_t) dest;
3526 #endif
3527 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3528 likely(!ctx->singlestep_enabled)) {
3529 tcg_gen_goto_tb(n);
3530 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3531 tcg_gen_exit_tb((long)tb + n);
3532 } else {
3533 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3534 if (unlikely(ctx->singlestep_enabled)) {
3535 if ((ctx->singlestep_enabled &
3536 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3537 ctx->exception == POWERPC_EXCP_BRANCH) {
3538 target_ulong tmp = ctx->nip;
3539 ctx->nip = dest;
3540 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
3541 ctx->nip = tmp;
3542 }
3543 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3544 gen_update_nip(ctx, dest);
3545 gen_helper_raise_debug();
3546 }
3547 }
3548 tcg_gen_exit_tb(0);
3549 }
3550 }
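/* gen_goto_tb() above implements direct block chaining: when the branch
 * target lies on the same guest page as the current TB and we are not
 * single-stepping, it emits goto_tb/exit_tb((long)tb + n) so the generated
 * code can later be patched to jump straight to the target TB; otherwise it
 * just updates cpu_nip and exits with tcg_gen_exit_tb(0).
 */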
3551
3552 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
3553 {
3554 #if defined(TARGET_PPC64)
3555 if (ctx->sf_mode == 0)
3556 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3557 else
3558 #endif
3559 tcg_gen_movi_tl(cpu_lr, nip);
3560 }
3561
3562 /* b ba bl bla */
3563 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3564 {
3565 target_ulong li, target;
3566
3567 ctx->exception = POWERPC_EXCP_BRANCH;
3568 /* sign extend LI */
3569 #if defined(TARGET_PPC64)
3570 if (ctx->sf_mode)
3571 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3572 else
3573 #endif
3574 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3575 if (likely(AA(ctx->opcode) == 0))
3576 target = ctx->nip + li - 4;
3577 else
3578 target = li;
3579 if (LK(ctx->opcode))
3580 gen_setlr(ctx, ctx->nip);
3581 gen_goto_tb(ctx, 0, target);
3582 }
3583
3584 #define BCOND_IM 0
3585 #define BCOND_LR 1
3586 #define BCOND_CTR 2
3587
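/* Conditional branches decode the BO field as in the PowerPC ISA:
 *   (BO & 0x04) == 0 : decrement CTR and require it to be zero if
 *                      (BO & 0x02) is set, non-zero otherwise (this form is
 *                      invalid for bcctr, as checked below);
 *   (BO & 0x10) == 0 : also require CR bit BI to be set if (BO & 0x08) is
 *                      set, clear otherwise.
 * Each failing test branches to l1, the fall-through (not-taken) path.
 */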
3588 static always_inline void gen_bcond (DisasContext *ctx, int type)
3589 {
3590 uint32_t bo = BO(ctx->opcode);
3591 int l1 = gen_new_label();
3592 TCGv target;
3593
3594 ctx->exception = POWERPC_EXCP_BRANCH;
3595 if (type == BCOND_LR || type == BCOND_CTR) {
3596 target = tcg_temp_local_new();
3597 if (type == BCOND_CTR)
3598 tcg_gen_mov_tl(target, cpu_ctr);
3599 else
3600 tcg_gen_mov_tl(target, cpu_lr);
3601 }
3602 if (LK(ctx->opcode))
3603 gen_setlr(ctx, ctx->nip);
3605 if ((bo & 0x4) == 0) {
3606 /* Decrement and test CTR */
3607 TCGv temp = tcg_temp_new();
3608 if (unlikely(type == BCOND_CTR)) {
3609 GEN_EXCP_INVAL(ctx);
3610 return;
3611 }
3612 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3613 #if defined(TARGET_PPC64)
3614 if (!ctx->sf_mode)
3615 tcg_gen_ext32u_tl(temp, cpu_ctr);
3616 else
3617 #endif
3618 tcg_gen_mov_tl(temp, cpu_ctr);
3619 if (bo & 0x2) {
3620 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3621 } else {
3622 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3623 }
3624 tcg_temp_free(temp);
3625 }
3626 if ((bo & 0x10) == 0) {
3627 /* Test CR */
3628 uint32_t bi = BI(ctx->opcode);
3629 uint32_t mask = 1 << (3 - (bi & 0x03));
3630 TCGv_i32 temp = tcg_temp_new_i32();
3631
3632 if (bo & 0x8) {
3633 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3634 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3635 } else {
3636 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3637 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3638 }
3639 tcg_temp_free_i32(temp);
3640 }
3641 if (type == BCOND_IM) {
3642 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3643 if (likely(AA(ctx->opcode) == 0)) {
3644 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3645 } else {
3646 gen_goto_tb(ctx, 0, li);
3647 }
3648 gen_set_label(l1);
3649 gen_goto_tb(ctx, 1, ctx->nip);
3650 } else {
3651 #if defined(TARGET_PPC64)
3652 if (!(ctx->sf_mode))
3653 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3654 else
3655 #endif
3656 tcg_gen_andi_tl(cpu_nip, target, ~3);
3657 tcg_gen_exit_tb(0);
3658 gen_set_label(l1);
3659 #if defined(TARGET_PPC64)
3660 if (!(ctx->sf_mode))
3661 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3662 else
3663 #endif
3664 tcg_gen_movi_tl(cpu_nip, ctx->nip);
3665 tcg_gen_exit_tb(0);
3666 }
3667 }
3668
3669 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3670 {
3671 gen_bcond(ctx, BCOND_IM);
3672 }
3673
3674 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3675 {
3676 gen_bcond(ctx, BCOND_CTR);
3677 }
3678
3679 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3680 {
3681 gen_bcond(ctx, BCOND_LR);
3682 }
3683
3684 /*** Condition register logical ***/
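/* Each CR field lives in its own 32-bit global (cpu_crf[0..7]) holding the
 * 4-bit field in its low bits. crbA/crbB/crbD address individual CR bits,
 * so the source fields are shifted to align the requested bit with the
 * destination bit position, the logical operation is applied, and the
 * single result bit is merged back into the destination field. */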
3685 #define GEN_CRLOGIC(name, tcg_op, opc) \
3686 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3687 { \
3688 uint8_t bitmask; \
3689 int sh; \
3690 TCGv_i32 t0, t1; \
3691 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3692 t0 = tcg_temp_new_i32(); \
3693 if (sh > 0) \
3694 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3695 else if (sh < 0) \
3696 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3697 else \
3698 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3699 t1 = tcg_temp_new_i32(); \
3700 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3701 if (sh > 0) \
3702 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3703 else if (sh < 0) \
3704 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3705 else \
3706 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3707 tcg_op(t0, t0, t1); \
3708 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3709 tcg_gen_andi_i32(t0, t0, bitmask); \
3710 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3711 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3712 tcg_temp_free_i32(t0); \
3713 tcg_temp_free_i32(t1); \
3714 }
3715
3716 /* crand */
3717 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3718 /* crandc */
3719 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3720 /* creqv */
3721 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3722 /* crnand */
3723 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3724 /* crnor */
3725 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3726 /* cror */
3727 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3728 /* crorc */
3729 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3730 /* crxor */
3731 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3732 /* mcrf */
3733 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3734 {
3735 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3736 }
3737
3738 /*** System linkage ***/
3739 /* rfi (supervisor only) */
3740 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3741 {
3742 #if defined(CONFIG_USER_ONLY)
3743 GEN_EXCP_PRIVOPC(ctx);
3744 #else
3745 /* Restore CPU state */
3746 if (unlikely(!ctx->supervisor)) {
3747 GEN_EXCP_PRIVOPC(ctx);
3748 return;
3749 }
3750 gen_helper_rfi();
3751 GEN_SYNC(ctx);
3752 #endif
3753 }
3754
3755 #if defined(TARGET_PPC64)
3756 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3757 {
3758 #if defined(CONFIG_USER_ONLY)
3759 GEN_EXCP_PRIVOPC(ctx);
3760 #else
3761 /* Restore CPU state */
3762 if (unlikely(!ctx->supervisor)) {
3763 GEN_EXCP_PRIVOPC(ctx);
3764 return;
3765 }
3766 gen_helper_rfid();
3767 GEN_SYNC(ctx);
3768 #endif
3769 }
3770
3771 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3772 {
3773 #if defined(CONFIG_USER_ONLY)
3774 GEN_EXCP_PRIVOPC(ctx);
3775 #else
3776 /* Restore CPU state */
3777 if (unlikely(ctx->supervisor <= 1)) {
3778 GEN_EXCP_PRIVOPC(ctx);
3779 return;
3780 }
3781 gen_helper_hrfid();
3782 GEN_SYNC(ctx);
3783 #endif
3784 }
3785 #endif
3786
3787 /* sc */
3788 #if defined(CONFIG_USER_ONLY)
3789 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3790 #else
3791 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3792 #endif
3793 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3794 {
3795 uint32_t lev;
3796
3797 lev = (ctx->opcode >> 5) & 0x7F;
3798 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
3799 }
3800
3801 /*** Trap ***/
3802 /* tw */
3803 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3804 {
3805 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3806 /* Update the nip since this might generate a trap exception */
3807 gen_update_nip(ctx, ctx->nip);
3808 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3809 tcg_temp_free_i32(t0);
3810 }
3811
3812 /* twi */
3813 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3814 {
3815 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3816 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3817 /* Update the nip since this might generate a trap exception */
3818 gen_update_nip(ctx, ctx->nip);
3819 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3820 tcg_temp_free(t0);
3821 tcg_temp_free_i32(t1);
3822 }
3823
3824 #if defined(TARGET_PPC64)
3825 /* td */
3826 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3827 {
3828 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3829 /* Update the nip since this might generate a trap exception */
3830 gen_update_nip(ctx, ctx->nip);
3831 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3832 tcg_temp_free_i32(t0);
3833 }
3834
3835 /* tdi */
3836 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3837 {
3838 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3839 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3840 /* Update the nip since this might generate a trap exception */
3841 gen_update_nip(ctx, ctx->nip);
3842 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3843 tcg_temp_free(t0);
3844 tcg_temp_free_i32(t1);
3845 }
3846 #endif
3847
3848 /*** Processor control ***/
3849 /* mcrxr */
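/* mcrxr copies XER[SO, OV, CA] into CR field crfD and then clears those
 * three bits in XER. */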
3850 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3851 {
3852 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3853 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3854 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3855 }
3856
3857 /* mfcr */
3858 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3859 {
3860 uint32_t crm, crn;
3861
3862 if (likely(ctx->opcode & 0x00100000)) {
3863 crm = CRM(ctx->opcode);
3864 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3865 crn = ffs(crm) - 1;
3866 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], crn * 4);
3867 }
3868 } else {
3869 gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]);
3870 }
3871 }
3872
3873 /* mfmsr */
3874 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3875 {
3876 #if defined(CONFIG_USER_ONLY)
3877 GEN_EXCP_PRIVREG(ctx);
3878 #else
3879 if (unlikely(!ctx->supervisor)) {
3880 GEN_EXCP_PRIVREG(ctx);
3881 return;
3882 }
3883 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3884 #endif
3885 }
3886
3887 #if 1
3888 #define SPR_NOACCESS ((void *)(-1UL))
3889 #else
3890 static void spr_noaccess (void *opaque, int sprn)
3891 {
3892 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3893 printf("ERROR: try to access SPR %d !\n", sprn);
3894 }
3895 #define SPR_NOACCESS (&spr_noaccess)
3896 #endif
3897
3898 /* mfspr */
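/* SPR accesses go through per-SPR callbacks set up at CPU initialisation,
 * with separate user (uea), supervisor (oea) and hypervisor (hea) tables.
 * The callbacks still follow the dyngen-style convention of returning
 * their result in cpu_T[0], hence the explicit move into rD afterwards. */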
3899 static always_inline void gen_op_mfspr (DisasContext *ctx)
3900 {
3901 void (*read_cb)(void *opaque, int sprn);
3902 uint32_t sprn = SPR(ctx->opcode);
3903
3904 #if !defined(CONFIG_USER_ONLY)
3905 if (ctx->supervisor == 2)
3906 read_cb = ctx->spr_cb[sprn].hea_read;
3907 else if (ctx->supervisor)
3908 read_cb = ctx->spr_cb[sprn].oea_read;
3909 else
3910 #endif
3911 read_cb = ctx->spr_cb[sprn].uea_read;
3912 if (likely(read_cb != NULL)) {
3913 if (likely(read_cb != SPR_NOACCESS)) {
3914 (*read_cb)(ctx, sprn);
3915 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3916 } else {
3917 /* Privilege exception */
3918 /* This is a hack to avoid warnings when running Linux:
3919 * this OS breaks the PowerPC virtualisation model,
3920 * allowing userland applications to read the PVR
3921 */
3922 if (sprn != SPR_PVR) {
3923 if (loglevel != 0) {
3924 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3925 ADDRX "\n", sprn, sprn, ctx->nip);
3926 }
3927 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3928 sprn, sprn, ctx->nip);
3929 }
3930 GEN_EXCP_PRIVREG(ctx);
3931 }
3932 } else {
3933 /* Not defined */
3934 if (loglevel != 0) {
3935 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3936 ADDRX "\n", sprn, sprn, ctx->nip);
3937 }
3938 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3939 sprn, sprn, ctx->nip);
3940 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3941 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3942 }
3943 }
3944
3945 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3946 {
3947 gen_op_mfspr(ctx);
3948 }
3949
3950 /* mftb */
3951 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
3952 {
3953 gen_op_mfspr(ctx);
3954 }
3955
3956 /* mtcrf */
3957 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3958 {
3959 uint32_t crm, crn;
3960
3961 crm = CRM(ctx->opcode);
3962 if (likely(crm && ((ctx->opcode & 0x00100000) || (crm & (crm - 1)) == 0))) {
3963 TCGv_i32 temp = tcg_temp_new_i32();
3964 crn = ffs(crm) - 1;
3965 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3966 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3967 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3968 tcg_temp_free_i32(temp);
3969 } else {
3970 TCGv_i32 temp = tcg_const_i32(crm);
3971 gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp);
3972 tcg_temp_free_i32(temp);
3973 }
3974 }
3975
3976 /* mtmsr */
3977 #if defined(TARGET_PPC64)
3978 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3979 {
3980 #if defined(CONFIG_USER_ONLY)
3981 GEN_EXCP_PRIVREG(ctx);
3982 #else
3983 if (unlikely(!ctx->supervisor)) {
3984 GEN_EXCP_PRIVREG(ctx);
3985 return;
3986 }
3987 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3988 if (ctx->opcode & 0x00010000) {
3989 /* Special form that does not need any synchronisation */
3990 TCGv t0 = tcg_temp_new();
3991 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3992 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3993 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3994 tcg_temp_free(t0);
3995 } else {
3996 /* XXX: we need to update nip before the store:
3997 * if we enter power saving mode, we will exit the loop
3998 * directly from ppc_store_msr.
3999 */
4000 gen_update_nip(ctx, ctx->nip);
4001 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
4002 /* Must stop the translation as the machine state may have changed */
4003 /* Note that mtmsr is not always defined as context-synchronizing */
4004 ctx->exception = POWERPC_EXCP_STOP;
4005 }
4006 #endif
4007 }
4008 #endif
4009
4010 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
4011 {
4012 #if defined(CONFIG_USER_ONLY)
4013 GEN_EXCP_PRIVREG(ctx);
4014 #else
4015 if (unlikely(!ctx->supervisor)) {
4016 GEN_EXCP_PRIVREG(ctx);
4017 return;
4018 }
4019 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4020 if (ctx->opcode & 0x00010000) {
4021 /* Special form that does not need any synchronisation */
4022 TCGv t0 = tcg_temp_new();
4023 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4024 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4025 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4026 tcg_temp_free(t0);
4027 } else {
4028 /* XXX: we need to update nip before the store:
4029 * if we enter power saving mode, we will exit the loop
4030 * directly from ppc_store_msr.
4031 */
4032 gen_update_nip(ctx, ctx->nip);
4033 #if defined(TARGET_PPC64)
4034 if (!ctx->sf_mode) {
4035 TCGv t0 = tcg_temp_new();
4036 TCGv t1 = tcg_temp_new();
4037 tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
4038 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
4039 tcg_gen_or_tl(t0, t0, t1);
4040 tcg_temp_free(t1);
4041 gen_helper_store_msr(t0);
4042 tcg_temp_free(t0);
4043 } else
4044 #endif
4045 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
4046 /* Must stop the translation as the machine state may have changed */
4047 /* Note that mtmsr is not always defined as context-synchronizing */
4048 ctx->exception = POWERPC_EXCP_STOP;
4049 }
4050 #endif
4051 }
4052
4053 /* mtspr */
4054 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
4055 {
4056 void (*write_cb)(void *opaque, int sprn);
4057 uint32_t sprn = SPR(ctx->opcode);
4058
4059 #if !defined(CONFIG_USER_ONLY)
4060 if (ctx->supervisor == 2)
4061 write_cb = ctx->spr_cb[sprn].hea_write;
4062 else if (ctx->supervisor)
4063 write_cb = ctx->spr_cb[sprn].oea_write;
4064 else
4065 #endif
4066 write_cb = ctx->spr_cb[sprn].uea_write;
4067 if (likely(write_cb != NULL)) {
4068 if (likely(write_cb != SPR_NOACCESS)) {
4069 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4070 (*write_cb)(ctx, sprn);
4071 } else {
4072 /* Privilege exception */
4073 if (loglevel != 0) {
4074 fprintf(logfile, "Trying to write privileged spr %d %03x at "
4075 ADDRX "\n", sprn, sprn, ctx->nip);
4076 }
4077 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
4078 sprn, sprn, ctx->nip);
4079 GEN_EXCP_PRIVREG(ctx);
4080 }
4081 } else {
4082 /* Not defined */
4083 if (loglevel != 0) {
4084 fprintf(logfile, "Trying to write invalid spr %d %03x at "
4085 ADDRX "\n", sprn, sprn, ctx->nip);
4086 }
4087 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
4088 sprn, sprn, ctx->nip);
4089 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
4090 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
4091 }
4092 }
4093
4094 /*** Cache management ***/
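/* QEMU does not model the data cache, so most cache block operations are
 * implemented as a dummy byte load (or load/store) at the target address:
 * this keeps the MMU access and any resulting exceptions architecturally
 * visible while the cache effect itself is a no-op. */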
4095 /* dcbf */
4096 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
4097 {
4098 /* XXX: specification says this is treated as a load by the MMU */
4099 TCGv t0 = tcg_temp_new();
4100 gen_set_access_type(ACCESS_CACHE);
4101 gen_addr_reg_index(t0, ctx);
4102 gen_qemu_ld8u(t0, t0, ctx->mem_idx);
4103 tcg_temp_free(t0);
4104 }
4105
4106 /* dcbi (Supervisor only) */
4107 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
4108 {
4109 #if defined(CONFIG_USER_ONLY)
4110 GEN_EXCP_PRIVOPC(ctx);
4111 #else
4112 TCGv EA, val;
4113 if (unlikely(!ctx->supervisor)) {
4114 GEN_EXCP_PRIVOPC(ctx);
4115 return;
4116 }
4117 EA = tcg_temp_new();
4118 gen_set_access_type(ACCESS_CACHE);
4119 gen_addr_reg_index(EA, ctx);
4120 val = tcg_temp_new();
4121 /* XXX: specification says this should be treated as a store by the MMU */
4122 gen_qemu_ld8u(val, EA, ctx->mem_idx);
4123 gen_qemu_st8(val, EA, ctx->mem_idx);
4124 tcg_temp_free(val);
4125 tcg_temp_free(EA);
4126 #endif
4127 }
4128
4129 /* dcbst */
4130 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
4131 {
4132 /* XXX: specification says this is treated as a load by the MMU */
4133 TCGv t0 = tcg_temp_new();
4134 gen_set_access_type(ACCESS_CACHE);
4135 gen_addr_reg_index(t0, ctx);
4136 gen_qemu_ld8u(t0, t0, ctx->mem_idx);
4137 tcg_temp_free(t0);
4138 }
4139
4140 /* dcbt */
4141 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
4142 {
4143 /* interpreted as a no-op */
4144 /* XXX: specification says this is treated as a load by the MMU
4145 * but does not generate any exception
4146 */
4147 }
4148
4149 /* dcbtst */
4150 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
4151 {
4152 /* interpreted as a no-op */
4153 /* XXX: specification says this is treated as a load by the MMU
4154 * but does not generate any exception
4155 */
4156 }
4157
4158 /* dcbz */
4159 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
4160 {
4161 TCGv t0 = tcg_temp_new();
4162 gen_addr_reg_index(t0, ctx);
4163 /* NIP cannot be restored if the memory exception comes from a helper */
4164 gen_update_nip(ctx, ctx->nip - 4);
4165 gen_helper_dcbz(t0);
4166 tcg_temp_free(t0);
4167 }
4168
4169 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
4170 {
4171 TCGv t0 = tcg_temp_new();
4172 gen_addr_reg_index(t0, ctx);
4173 /* NIP cannot be restored if the memory exception comes from a helper */
4174 gen_update_nip(ctx, ctx->nip - 4);
4175 if (ctx->opcode & 0x00200000)
4176 gen_helper_dcbz(t0);
4177 else
4178 gen_helper_dcbz_970(t0);
4179 tcg_temp_free(t0);
4180 }
4181
4182 /* icbi */
4183 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
4184 {
4185 TCGv t0 = tcg_temp_new();
4186 /* NIP cannot be restored if the memory exception comes from a helper */
4187 gen_update_nip(ctx, ctx->nip - 4);
4188 gen_addr_reg_index(t0, ctx);
4189 gen_helper_icbi(t0);
4190 tcg_temp_free(t0);
4191 }
4192
4193 /* Optional: */
4194 /* dcba */
4195 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
4196 {
4197 /* interpreted as a no-op */
4198 /* XXX: specification says this is treated as a store by the MMU
4199 * but does not generate any exception
4200 */
4201 }
4202
4203 /*** Segment register manipulation ***/
4204 /* Supervisor only: */
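/* The segment register ops below still rely on the old cpu_T[0]/cpu_T[1]
 * micro-op interface: T1 carries the SR index and T0 the value loaded or
 * stored by gen_op_load_sr()/gen_op_store_sr(). */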
4205 /* mfsr */
4206 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
4207 {
4208 #if defined(CONFIG_USER_ONLY)
4209 GEN_EXCP_PRIVREG(ctx);
4210 #else
4211 if (unlikely(!ctx->supervisor)) {
4212 GEN_EXCP_PRIVREG(ctx);
4213 return;
4214 }
4215 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4216 gen_op_load_sr();
4217 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4218 #endif
4219 }
4220
4221 /* mfsrin */
4222 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
4223 {
4224 #if defined(CONFIG_USER_ONLY)
4225 GEN_EXCP_PRIVREG(ctx);
4226 #else
4227 if (unlikely(!ctx->supervisor)) {
4228 GEN_EXCP_PRIVREG(ctx);
4229 return;
4230 }
4231 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4232 gen_op_srli_T1(28);
4233 gen_op_load_sr();
4234 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4235 #endif
4236 }
4237
4238 /* mtsr */
4239 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
4240 {
4241 #if defined(CONFIG_USER_ONLY)
4242 GEN_EXCP_PRIVREG(ctx);
4243 #else
4244 if (unlikely(!ctx->supervisor)) {
4245 GEN_EXCP_PRIVREG(ctx);
4246 return;
4247 }
4248 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4249 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4250 gen_op_store_sr();
4251 #endif
4252 }
4253
4254 /* mtsrin */
4255 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
4256 {
4257 #if defined(CONFIG_USER_ONLY)
4258 GEN_EXCP_PRIVREG(ctx);
4259 #else
4260 if (unlikely(!ctx->supervisor)) {
4261 GEN_EXCP_PRIVREG(ctx);
4262 return;
4263 }
4264 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4265 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4266 gen_op_srli_T1(28);
4267 gen_op_store_sr();
4268 #endif
4269 }
4270
4271 #if defined(TARGET_PPC64)
4272 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4273 /* mfsr */
4274 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
4275 {
4276 #if defined(CONFIG_USER_ONLY)
4277 GEN_EXCP_PRIVREG(ctx);
4278 #else
4279 if (unlikely(!ctx->supervisor)) {
4280 GEN_EXCP_PRIVREG(ctx);
4281 return;
4282 }
4283 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4284 gen_op_load_slb();
4285 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4286 #endif
4287 }
4288
4289 /* mfsrin */
4290 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
4291 PPC_SEGMENT_64B)
4292 {
4293 #if defined(CONFIG_USER_ONLY)
4294 GEN_EXCP_PRIVREG(ctx);
4295 #else
4296 if (unlikely(!ctx->supervisor)) {
4297 GEN_EXCP_PRIVREG(ctx);
4298 return;
4299 }
4300 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4301 gen_op_srli_T1(28);
4302 gen_op_load_slb();
4303 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4304 #endif
4305 }
4306
4307 /* mtsr */
4308 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
4309 {
4310 #if defined(CONFIG_USER_ONLY)
4311 GEN_EXCP_PRIVREG(ctx);
4312 #else
4313 if (unlikely(!ctx->supervisor)) {
4314 GEN_EXCP_PRIVREG(ctx);
4315 return;
4316 }
4317 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4318 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4319 gen_op_store_slb();
4320 #endif
4321 }
4322
4323 /* mtsrin */
4324 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
4325 PPC_SEGMENT_64B)
4326 {
4327 #if defined(CONFIG_USER_ONLY)
4328 GEN_EXCP_PRIVREG(ctx);
4329 #else
4330 if (unlikely(!ctx->supervisor)) {
4331 GEN_EXCP_PRIVREG(ctx);
4332 return;
4333 }
4334 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4335 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4336 gen_op_srli_T1(28);
4337 gen_op_store_slb();
4338 #endif
4339 }
4340 #endif /* defined(TARGET_PPC64) */
4341
4342 /*** Lookaside buffer management ***/
4343 /* Optional & supervisor only: */
4344 /* tlbia */
4345 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
4346 {
4347 #if defined(CONFIG_USER_ONLY)
4348 GEN_EXCP_PRIVOPC(ctx);
4349 #else
4350 if (unlikely(!ctx->supervisor)) {
4351 GEN_EXCP_PRIVOPC(ctx);
4352 return;
4353 }
4354 gen_op_tlbia();
4355 #endif
4356 }
4357
4358 /* tlbie */
4359 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
4360 {
4361 #if defined(CONFIG_USER_ONLY)
4362 GEN_EXCP_PRIVOPC(ctx);
4363 #else
4364 if (unlikely(!ctx->supervisor)) {
4365 GEN_EXCP_PRIVOPC(ctx);
4366 return;
4367 }
4368 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4369 #if defined(TARGET_PPC64)
4370 if (ctx->sf_mode)
4371 gen_op_tlbie_64();
4372 else
4373 #endif
4374 gen_op_tlbie();
4375 #endif
4376 }
4377
4378 /* tlbsync */
4379 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
4380 {
4381 #if defined(CONFIG_USER_ONLY)
4382 GEN_EXCP_PRIVOPC(ctx);
4383 #else
4384 if (unlikely(!ctx->supervisor)) {
4385 GEN_EXCP_PRIVOPC(ctx);
4386 return;
4387 }
4388 /* This has no effect: it should ensure that all previous
4389 * tlbie have completed
4390 */
4391 GEN_STOP(ctx);
4392 #endif
4393 }
4394
4395 #if defined(TARGET_PPC64)
4396 /* slbia */
4397 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
4398 {
4399 #if defined(CONFIG_USER_ONLY)
4400 GEN_EXCP_PRIVOPC(ctx);
4401 #else
4402 if (unlikely(!ctx->supervisor)) {
4403 GEN_EXCP_PRIVOPC(ctx);
4404 return;
4405 }
4406 gen_op_slbia();
4407 #endif
4408 }
4409
4410 /* slbie */
4411 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
4412 {
4413 #if defined(CONFIG_USER_ONLY)
4414 GEN_EXCP_PRIVOPC(ctx);
4415 #else
4416 if (unlikely(!ctx->supervisor)) {
4417 GEN_EXCP_PRIVOPC(ctx);
4418 return;
4419 }
4420 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4421 gen_op_slbie();
4422 #endif
4423 }
4424 #endif
4425
4426 /*** External control ***/
4427 /* Optional: */
4428 /* eciwx */
4429 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
4430 {
4431 /* Should check EAR[E] ! */
4432 TCGv t0 = tcg_temp_new();
4433 gen_set_access_type(ACCESS_RES);
4434 gen_addr_reg_index(t0, ctx);
4435 gen_check_align(ctx, t0, 0x03);
4436 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
4437 tcg_temp_free(t0);
4438 }
4439
4440 /* ecowx */
4441 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
4442 {
4443 /* Should check EAR[E] ! */
4444 TCGv t0 = tcg_temp_new();
4445 gen_set_access_type(ACCESS_RES);
4446 gen_addr_reg_index(t0, ctx);
4447 gen_check_align(ctx, t0, 0x03);
4448 gen_qemu_st32(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
4449 tcg_temp_free(t0);
4450 }
4451
4452 /* PowerPC 601 specific instructions */
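/* POWER-heritage instructions kept for PowerPC 601 compatibility. Most are
 * open-coded with TCG labels, and several use the MQ special register to
 * hold the second half of a shift or multiply result. */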
4453 /* abs - abs. */
4454 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
4455 {
4456 int l1 = gen_new_label();
4457 int l2 = gen_new_label();
4458 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4459 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4460 tcg_gen_br(l2);
4461 gen_set_label(l1);
4462 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4463 gen_set_label(l2);
4464 if (unlikely(Rc(ctx->opcode) != 0))
4465 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4466 }
4467
4468 /* abso - abso. */
4469 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
4470 {
4471 int l1 = gen_new_label();
4472 int l2 = gen_new_label();
4473 int l3 = gen_new_label();
4474 /* Start with XER OV disabled, the most likely case */
4475 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4476 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4477 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4478 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4479 tcg_gen_br(l2);
4480 gen_set_label(l1);
4481 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4482 tcg_gen_br(l3);
4483 gen_set_label(l2);
4484 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4485 gen_set_label(l3);
4486 if (unlikely(Rc(ctx->opcode) != 0))
4487 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4488 }
4489
4490 /* clcs */
4491 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
4492 {
4493 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4494 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4495 tcg_temp_free_i32(t0);
4496 /* Rc=1 sets CR0 to an undefined state */
4497 }
4498
4499 /* div - div. */
4500 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
4501 {
4502 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4503 if (unlikely(Rc(ctx->opcode) != 0))
4504 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4505 }
4506
4507 /* divo - divo. */
4508 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
4509 {
4510 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4511 if (unlikely(Rc(ctx->opcode) != 0))
4512 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4513 }
4514
4515 /* divs - divs. */
4516 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
4517 {
4518 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4519 if (unlikely(Rc(ctx->opcode) != 0))
4520 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4521 }
4522
4523 /* divso - divso. */
4524 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
4525 {
4526 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4527 if (unlikely(Rc(ctx->opcode) != 0))
4528 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4529 }
4530
4531 /* doz - doz. */
4532 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
4533 {
4534 int l1 = gen_new_label();
4535 int l2 = gen_new_label();
4536 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4537 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4538 tcg_gen_br(l2);
4539 gen_set_label(l1);
4540 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4541 gen_set_label(l2);
4542 if (unlikely(Rc(ctx->opcode) != 0))
4543 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4544 }
4545
4546 /* dozo - dozo. */
4547 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
4548 {
4549 int l1 = gen_new_label();
4550 int l2 = gen_new_label();
4551 TCGv t0 = tcg_temp_new();
4552 TCGv t1 = tcg_temp_new();
4553 TCGv t2 = tcg_temp_new();
4554 /* Start with XER OV disabled, the most likely case */
4555 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4556 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4557 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4558 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4559 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4560 tcg_gen_andc_tl(t1, t1, t2);
4561 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4562 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4563 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4564 tcg_gen_br(l2);
4565 gen_set_label(l1);
4566 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4567 gen_set_label(l2);
4568 tcg_temp_free(t0);
4569 tcg_temp_free(t1);
4570 tcg_temp_free(t2);
4571 if (unlikely(Rc(ctx->opcode) != 0))
4572 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4573 }
4574
4575 /* dozi */
4576 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4577 {
4578 target_long simm = SIMM(ctx->opcode);
4579 int l1 = gen_new_label();
4580 int l2 = gen_new_label();
4581 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4582 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4583 tcg_gen_br(l2);
4584 gen_set_label(l1);
4585 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4586 gen_set_label(l2);
4587 if (unlikely(Rc(ctx->opcode) != 0))
4588 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4589 }
4590
4591 /* lscbx - lscbx. */
4592 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4593 {
4594 TCGv t0 = tcg_temp_new();
4595 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4596 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4597 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4598
4599 gen_addr_reg_index(t0, ctx);
4600 /* NIP cannot be restored if the memory exception comes from a helper */
4601 gen_update_nip(ctx, ctx->nip - 4);
4602 gen_helper_lscbx(t0, t0, t1, t2, t3);
4603 tcg_temp_free_i32(t1);
4604 tcg_temp_free_i32(t2);
4605 tcg_temp_free_i32(t3);
4606 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4607 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4608 if (unlikely(Rc(ctx->opcode) != 0))
4609 gen_set_Rc0(ctx, t0);
4610 tcg_temp_free(t0);
4611 }
4612
4613 /* maskg - maskg. */
4614 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4615 {
4616 int l1 = gen_new_label();
4617 TCGv t0 = tcg_temp_new();
4618 TCGv t1 = tcg_temp_new();
4619 TCGv t2 = tcg_temp_new();
4620 TCGv t3 = tcg_temp_new();
4621 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4622 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4623 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4624 tcg_gen_addi_tl(t2, t0, 1);
4625 tcg_gen_shr_tl(t2, t3, t2);
4626 tcg_gen_shr_tl(t3, t3, t1);
4627 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4628 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4629 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4630 gen_set_label(l1);
4631 tcg_temp_free(t0);
4632 tcg_temp_free(t1);
4633 tcg_temp_free(t2);
4634 tcg_temp_free(t3);
4635 if (unlikely(Rc(ctx->opcode) != 0))
4636 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4637 }
4638
4639 /* maskir - maskir. */
4640 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4641 {
4642 TCGv t0 = tcg_temp_new();
4643 TCGv t1 = tcg_temp_new();
4644 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4645 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4646 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4647 tcg_temp_free(t0);
4648 tcg_temp_free(t1);
4649 if (unlikely(Rc(ctx->opcode) != 0))
4650 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4651 }
4652
4653 /* mul - mul. */
4654 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4655 {
4656 TCGv_i64 t0 = tcg_temp_new_i64();
4657 TCGv_i64 t1 = tcg_temp_new_i64();
4658 TCGv t2 = tcg_temp_new();
4659 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4660 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4661 tcg_gen_mul_i64(t0, t0, t1);
4662 tcg_gen_trunc_i64_tl(t2, t0);
4663 gen_store_spr(SPR_MQ, t2);
4664 tcg_gen_shri_i64(t1, t0, 32);
4665 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4666 tcg_temp_free_i64(t0);
4667 tcg_temp_free_i64(t1);
4668 tcg_temp_free(t2);
4669 if (unlikely(Rc(ctx->opcode) != 0))
4670 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4671 }
4672
4673 /* mulo - mulo. */
4674 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4675 {
4676 int l1 = gen_new_label();
4677 TCGv_i64 t0 = tcg_temp_new_i64();
4678 TCGv_i64 t1 = tcg_temp_new_i64();
4679 TCGv t2 = tcg_temp_new();
4680 /* Start with XER OV disabled, the most likely case */
4681 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4682 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4683 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4684 tcg_gen_mul_i64(t0, t0, t1);
4685 tcg_gen_trunc_i64_tl(t2, t0);
4686 gen_store_spr(SPR_MQ, t2);
4687 tcg_gen_shri_i64(t1, t0, 32);
4688 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4689 tcg_gen_ext32s_i64(t1, t0);
4690 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4691 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4692 gen_set_label(l1);
4693 tcg_temp_free_i64(t0);
4694 tcg_temp_free_i64(t1);
4695 tcg_temp_free(t2);
4696 if (unlikely(Rc(ctx->opcode) != 0))
4697 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4698 }
4699
4700 /* nabs - nabs. */
4701 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4702 {
4703 int l1 = gen_new_label();
4704 int l2 = gen_new_label();
4705 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4706 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4707 tcg_gen_br(l2);
4708 gen_set_label(l1);
4709 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4710 gen_set_label(l2);
4711 if (unlikely(Rc(ctx->opcode) != 0))
4712 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4713 }
4714
4715 /* nabso - nabso. */
4716 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4717 {
4718 int l1 = gen_new_label();
4719 int l2 = gen_new_label();
4720 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4721 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4722 tcg_gen_br(l2);
4723 gen_set_label(l1);
4724 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4725 gen_set_label(l2);
4726 /* nabs never overflows */
4727 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4728 if (unlikely(Rc(ctx->opcode) != 0))
4729 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4730 }
4731
4732 /* rlmi - rlmi. */
4733 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4734 {
4735 uint32_t mb = MB(ctx->opcode);
4736 uint32_t me = ME(ctx->opcode);
4737 TCGv t0 = tcg_temp_new();
4738 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4739 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4740 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4741 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4742 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4743 tcg_temp_free(t0);
4744 if (unlikely(Rc(ctx->opcode) != 0))
4745 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4746 }
4747
4748 /* rrib - rrib. */
4749 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4750 {
4751 TCGv t0 = tcg_temp_new();
4752 TCGv t1 = tcg_temp_new();
4753 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4754 tcg_gen_movi_tl(t1, 0x80000000);
4755 tcg_gen_shr_tl(t1, t1, t0);
4756 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4757 tcg_gen_and_tl(t0, t0, t1);
4758 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4759 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4760 tcg_temp_free(t0);
4761 tcg_temp_free(t1);
4762 if (unlikely(Rc(ctx->opcode) != 0))
4763 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4764 }
4765
4766 /* sle - sle. */
4767 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4768 {
4769 TCGv t0 = tcg_temp_new();
4770 TCGv t1 = tcg_temp_new();
4771 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4772 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4773 tcg_gen_subfi_tl(t1, 32, t1);
4774 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4775 tcg_gen_or_tl(t1, t0, t1);
4776 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4777 gen_store_spr(SPR_MQ, t1);
4778 tcg_temp_free(t0);
4779 tcg_temp_free(t1);
4780 if (unlikely(Rc(ctx->opcode) != 0))
4781 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4782 }
4783
4784 /* sleq - sleq. */
4785 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4786 {
4787 TCGv t0 = tcg_temp_new();
4788 TCGv t1 = tcg_temp_new();
4789 TCGv t2 = tcg_temp_new();
4790 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4791 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4792 tcg_gen_shl_tl(t2, t2, t0);
4793 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4794 gen_load_spr(t1, SPR_MQ);
4795 gen_store_spr(SPR_MQ, t0);
4796 tcg_gen_and_tl(t0, t0, t2);
4797 tcg_gen_andc_tl(t1, t1, t2);
4798 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4799 tcg_temp_free(t0);
4800 tcg_temp_free(t1);
4801 tcg_temp_free(t2);
4802 if (unlikely(Rc(ctx->opcode) != 0))
4803 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4804 }
4805
4806 /* sliq - sliq. */
4807 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4808 {
4809 int sh = SH(ctx->opcode);
4810 TCGv t0 = tcg_temp_new();
4811 TCGv t1 = tcg_temp_new();
4812 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4813 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4814 tcg_gen_or_tl(t1, t0, t1);
4815 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4816 gen_store_spr(SPR_MQ, t1);
4817 tcg_temp_free(t0);
4818 tcg_temp_free(t1);
4819 if (unlikely(Rc(ctx->opcode) != 0))
4820 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4821 }
4822
4823 /* slliq - slliq. */
4824 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4825 {
4826 int sh = SH(ctx->opcode);
4827 TCGv t0 = tcg_temp_new();
4828 TCGv t1 = tcg_temp_new();
4829 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4830 gen_load_spr(t1, SPR_MQ);
4831 gen_store_spr(SPR_MQ, t0);
4832 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4833 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4834 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4835 tcg_temp_free(t0);
4836 tcg_temp_free(t1);
4837 if (unlikely(Rc(ctx->opcode) != 0))
4838 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4839 }
4840
4841 /* sllq - sllq. */
4842 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4843 {
4844 int l1 = gen_new_label();
4845 int l2 = gen_new_label();
4846 TCGv t0 = tcg_temp_local_new();
4847 TCGv t1 = tcg_temp_local_new();
4848 TCGv t2 = tcg_temp_local_new();
4849 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4850 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4851 tcg_gen_shl_tl(t1, t1, t2);
4852 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4853 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4854 gen_load_spr(t0, SPR_MQ);
4855 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4856 tcg_gen_br(l2);
4857 gen_set_label(l1);
4858 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4859 gen_load_spr(t2, SPR_MQ);
4860 tcg_gen_andc_tl(t1, t2, t1);
4861 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4862 gen_set_label(l2);
4863 tcg_temp_free(t0);
4864 tcg_temp_free(t1);
4865 tcg_temp_free(t2);
4866 if (unlikely(Rc(ctx->opcode) != 0))
4867 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4868 }
4869
4870 /* slq - slq. */
4871 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4872 {
4873 int l1 = gen_new_label();
4874 TCGv t0 = tcg_temp_new();
4875 TCGv t1 = tcg_temp_new();
4876 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4877 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4878 tcg_gen_subfi_tl(t1, 32, t1);
4879 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4880 tcg_gen_or_tl(t1, t0, t1);
4881 gen_store_spr(SPR_MQ, t1);
4882 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4883 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4884 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4885 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4886 gen_set_label(l1);
4887 tcg_temp_free(t0);
4888 tcg_temp_free(t1);
4889 if (unlikely(Rc(ctx->opcode) != 0))
4890 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4891 }
4892
4893 /* sraiq - sraiq. */
4894 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4895 {
4896 int sh = SH(ctx->opcode);
4897 int l1 = gen_new_label();
4898 TCGv t0 = tcg_temp_new();
4899 TCGv t1 = tcg_temp_new();
4900 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4901 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4902 tcg_gen_or_tl(t0, t0, t1);
4903 gen_store_spr(SPR_MQ, t0);
4904 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4905 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4906 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4907 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4908 gen_set_label(l1);
4909 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4910 tcg_temp_free(t0);
4911 tcg_temp_free(t1);
4912 if (unlikely(Rc(ctx->opcode) != 0))
4913 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4914 }
4915
4916 /* sraq - sraq. */
4917 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4918 {
4919 int l1 = gen_new_label();
4920 int l2 = gen_new_label();
4921 TCGv t0 = tcg_temp_new();
4922 TCGv t1 = tcg_temp_local_new();
4923 TCGv t2 = tcg_temp_local_new();
4924 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4925 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4926 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4927 tcg_gen_subfi_tl(t2, 32, t2);
4928 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4929 tcg_gen_or_tl(t0, t0, t2);
4930 gen_store_spr(SPR_MQ, t0);
4931 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4932 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); /* shift amount below 32 */
4933 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4934 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4935 gen_set_label(l1);
4936 tcg_temp_free(t0);
4937 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4938 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4939 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4940 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4941 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4942 gen_set_label(l2);
4943 tcg_temp_free(t1);
4944 tcg_temp_free(t2);
4945 if (unlikely(Rc(ctx->opcode) != 0))
4946 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4947 }
4948
4949 /* sre - sre. */
4950 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4951 {
4952 TCGv t0 = tcg_temp_new();
4953 TCGv t1 = tcg_temp_new();
4954 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4955 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4956 tcg_gen_subfi_tl(t1, 32, t1);
4957 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4958 tcg_gen_or_tl(t1, t0, t1);
4959 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4960 gen_store_spr(SPR_MQ, t1);
4961 tcg_temp_free(t0);
4962 tcg_temp_free(t1);
4963 if (unlikely(Rc(ctx->opcode) != 0))
4964 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4965 }
4966
4967 /* srea - srea. */
4968 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4969 {
4970 TCGv t0 = tcg_temp_new();
4971 TCGv t1 = tcg_temp_new();
4972 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4973 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4974 gen_store_spr(SPR_MQ, t0);
4975 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4976 tcg_temp_free(t0);
4977 tcg_temp_free(t1);
4978 if (unlikely(Rc(ctx->opcode) != 0))
4979 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4980 }
4981
4982 /* sreq */
4983 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4984 {
4985 TCGv t0 = tcg_temp_new();
4986 TCGv t1 = tcg_temp_new();
4987 TCGv t2 = tcg_temp_new();
4988 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4989 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4990 tcg_gen_shr_tl(t1, t1, t0);
4991 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4992 gen_load_spr(t2, SPR_MQ);
4993 gen_store_spr(SPR_MQ, t0);
4994 tcg_gen_and_tl(t0, t0, t1);
4995 tcg_gen_andc_tl(t2, t2, t1);
4996 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
4997 tcg_temp_free(t0);
4998 tcg_temp_free(t1);
4999 tcg_temp_free(t2);
5000 if (unlikely(Rc(ctx->opcode) != 0))
5001 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5002 }
5003
5004 /* sriq */
5005 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
5006 {
5007 int sh = SH(ctx->opcode);
5008 TCGv t0 = tcg_temp_new();
5009 TCGv t1 = tcg_temp_new();
5010 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5011 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5012 tcg_gen_or_tl(t1, t0, t1);
5013 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5014 gen_store_spr(SPR_MQ, t1);
5015 tcg_temp_free(t0);
5016 tcg_temp_free(t1);
5017 if (unlikely(Rc(ctx->opcode) != 0))
5018 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5019 }
5020
5021 /* srliq */
5022 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
5023 {
5024 int sh = SH(ctx->opcode);
5025 TCGv t0 = tcg_temp_new();
5026 TCGv t1 = tcg_temp_new();
5027 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5028 gen_load_spr(t1, SPR_MQ);
5029 gen_store_spr(SPR_MQ, t0);
5030 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5031 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5032 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5033 tcg_temp_free(t0);
5034 tcg_temp_free(t1);
5035 if (unlikely(Rc(ctx->opcode) != 0))
5036 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5037 }
5038
5039 /* srlq */
5040 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
5041 {
5042 int l1 = gen_new_label();
5043 int l2 = gen_new_label();
5044 TCGv t0 = tcg_temp_local_new();
5045 TCGv t1 = tcg_temp_local_new();
5046 TCGv t2 = tcg_temp_local_new();
5047 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5048 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5049 tcg_gen_shr_tl(t2, t1, t2);
5050 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5051 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5052 gen_load_spr(t0, SPR_MQ);
5053 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5054 tcg_gen_br(l2);
5055 gen_set_label(l1);
5056 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5057 tcg_gen_and_tl(t0, t0, t2);
5058 gen_load_spr(t1, SPR_MQ);
5059 tcg_gen_andc_tl(t1, t1, t2);
5060 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5061 gen_set_label(l2);
5062 tcg_temp_free(t0);
5063 tcg_temp_free(t1);
5064 tcg_temp_free(t2);
5065 if (unlikely(Rc(ctx->opcode) != 0))
5066 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5067 }
5068
5069 /* srq */
5070 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
5071 {
5072 int l1 = gen_new_label();
5073 TCGv t0 = tcg_temp_new();
5074 TCGv t1 = tcg_temp_new();
5075 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5076 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5077 tcg_gen_subfi_tl(t1, 32, t1);
5078 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5079 tcg_gen_or_tl(t1, t0, t1);
5080 gen_store_spr(SPR_MQ, t1);
5081 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5082 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5083 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); /* shift amount below 32 */
5084 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5085 gen_set_label(l1);
5086 tcg_temp_free(t0);
5087 tcg_temp_free(t1);
5088 if (unlikely(Rc(ctx->opcode) != 0))
5089 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5090 }
5091
5092 /* PowerPC 602 specific instructions */
5093 /* dsa */
5094 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
5095 {
5096 /* XXX: TODO */
5097 GEN_EXCP_INVAL(ctx);
5098 }
5099
5100 /* esa */
5101 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
5102 {
5103 /* XXX: TODO */
5104 GEN_EXCP_INVAL(ctx);
5105 }
5106
5107 /* mfrom */
5108 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
5109 {
5110 #if defined(CONFIG_USER_ONLY)
5111 GEN_EXCP_PRIVOPC(ctx);
5112 #else
5113 if (unlikely(!ctx->supervisor)) {
5114 GEN_EXCP_PRIVOPC(ctx);
5115 return;
5116 }
5117 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5118 #endif
5119 }
5120
5121 /* 602 - 603 - G2 TLB management */
5122 /* tlbld */
5123 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
5124 {
5125 #if defined(CONFIG_USER_ONLY)
5126 GEN_EXCP_PRIVOPC(ctx);
5127 #else
5128 if (unlikely(!ctx->supervisor)) {
5129 GEN_EXCP_PRIVOPC(ctx);
5130 return;
5131 }
5132 gen_helper_load_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5133 #endif
5134 }
5135
5136 /* tlbli */
5137 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
5138 {
5139 #if defined(CONFIG_USER_ONLY)
5140 GEN_EXCP_PRIVOPC(ctx);
5141 #else
5142 if (unlikely(!ctx->supervisor)) {
5143 GEN_EXCP_PRIVOPC(ctx);
5144 return;
5145 }
5146 gen_helper_load_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5147 #endif
5148 }
5149
5150 /* 74xx TLB management */
5151 /* tlbld */
5152 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
5153 {
5154 #if defined(CONFIG_USER_ONLY)
5155 GEN_EXCP_PRIVOPC(ctx);
5156 #else
5157 if (unlikely(!ctx->supervisor)) {
5158 GEN_EXCP_PRIVOPC(ctx);
5159 return;
5160 }
5161 gen_helper_load_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5162 #endif
5163 }
5164
5165 /* tlbli */
5166 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
5167 {
5168 #if defined(CONFIG_USER_ONLY)
5169 GEN_EXCP_PRIVOPC(ctx);
5170 #else
5171 if (unlikely(!ctx->supervisor)) {
5172 GEN_EXCP_PRIVOPC(ctx);
5173 return;
5174 }
5175 gen_helper_load_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5176 #endif
5177 }
5178
5179 /* POWER instructions not in PowerPC 601 */
5180 /* clf */
5181 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
5182 {
5183 /* Cache line flush: implemented as a no-op */
5184 }
5185
5186 /* cli */
5187 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
5188 {
5189 /* Cache line invalidate: privileged and treated as a no-op */
5190 #if defined(CONFIG_USER_ONLY)
5191 GEN_EXCP_PRIVOPC(ctx);
5192 #else
5193 if (unlikely(!ctx->supervisor)) {
5194 GEN_EXCP_PRIVOPC(ctx);
5195 return;
5196 }
5197 #endif
5198 }
5199
5200 /* dclst */
5201 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
5202 {
5203 /* Data cache line store: treated as a no-op */
5204 }
5205
5206 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
5207 {
5208 #if defined(CONFIG_USER_ONLY)
5209 GEN_EXCP_PRIVOPC(ctx);
5210 #else
5211 if (unlikely(!ctx->supervisor)) {
5212 GEN_EXCP_PRIVOPC(ctx);
5213 return;
5214 }
5215 int ra = rA(ctx->opcode);
5216 int rd = rD(ctx->opcode);
5217
5218 gen_addr_reg_index(cpu_T[0], ctx);
5219 gen_op_POWER_mfsri();
5220 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
5221 if (ra != 0 && ra != rd)
5222 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
5223 #endif
5224 }
5225
5226 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
5227 {
5228 #if defined(CONFIG_USER_ONLY)
5229 GEN_EXCP_PRIVOPC(ctx);
5230 #else
5231 TCGv t0;
5232 if (unlikely(!ctx->supervisor)) {
5233 GEN_EXCP_PRIVOPC(ctx);
5234 return;
5235 }
5236 t0 = tcg_temp_new();
5237 gen_addr_reg_index(t0, ctx);
5238 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5239 tcg_temp_free(t0);
5240 #endif
5241 }
5242
5243 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
5244 {
5245 #if defined(CONFIG_USER_ONLY)
5246 GEN_EXCP_PRIVOPC(ctx);
5247 #else
5248 if (unlikely(!ctx->supervisor)) {
5249 GEN_EXCP_PRIVOPC(ctx);
5250 return;
5251 }
5252 gen_helper_rfsvc();
5253 GEN_SYNC(ctx);
5254 #endif
5255 }
5256
5257 /* svc is not implemented for now */
5258
5259 /* POWER2 specific instructions */
5260 /* Quad manipulation (load/store two floats at a time) */
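/* lfq/stfq and friends transfer a pair of FPRs, FRT and FRT+1 (modulo 32),
 * to or from two consecutive doublewords in memory. */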
5261
5262 /* lfq */
5263 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5264 {
5265 int rd = rD(ctx->opcode);
5266 TCGv t0 = tcg_temp_new();
5267 gen_addr_imm_index(t0, ctx, 0);
5268 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5269 tcg_gen_addi_tl(t0, t0, 8);
5270 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5271 tcg_temp_free(t0);
5272 }
5273
5274 /* lfqu */
5275 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5276 {
5277 int ra = rA(ctx->opcode);
5278 int rd = rD(ctx->opcode);
5279 TCGv t0 = tcg_temp_new();
5280 TCGv t1 = tcg_temp_new();
5281 gen_addr_imm_index(t0, ctx, 0);
5282 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5283 tcg_gen_addi_tl(t1, t0, 8);
5284 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5285 if (ra != 0)
5286 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5287 tcg_temp_free(t0);
5288 tcg_temp_free(t1);
5289 }
5290
5291 /* lfqux */
5292 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
5293 {
5294 int ra = rA(ctx->opcode);
5295 int rd = rD(ctx->opcode);
5296 TCGv t0 = tcg_temp_new();
5297 TCGv t1 = tcg_temp_new();
5298 gen_addr_reg_index(t0, ctx);
5299 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5300 tcg_gen_addi_tl(t1, t0, 8);
5301 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5302 if (ra != 0)
5303 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5304 tcg_temp_free(t0);
5305 tcg_temp_free(t1);
5306 }
5307
5308 /* lfqx */
5309 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
5310 {
5311 int rd = rD(ctx->opcode);
5312 TCGv t0 = tcg_temp_new();
5313 gen_addr_reg_index(t0, ctx);
5314 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5315 tcg_gen_addi_tl(t0, t0, 8);
5316 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5317 tcg_temp_free(t0);
5318 }
5319
5320 /* stfq */
5321 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5322 {
5323 int rd = rD(ctx->opcode);
5324 TCGv t0 = tcg_temp_new();
5325 gen_addr_imm_index(t0, ctx, 0);
5326 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5327 tcg_gen_addi_tl(t0, t0, 8);
5328 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5329 tcg_temp_free(t0);
5330 }
5331
5332 /* stfqu */
5333 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5334 {
5335 int ra = rA(ctx->opcode);
5336 int rd = rD(ctx->opcode);
5337 TCGv t0 = tcg_temp_new();
5338 TCGv t1 = tcg_temp_new();
5339 gen_addr_imm_index(t0, ctx, 0);
5340 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5341 tcg_gen_addi_tl(t1, t0, 8);
5342 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5343 if (ra != 0)
5344 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5345 tcg_temp_free(t0);
5346 tcg_temp_free(t1);
5347 }
5348
5349 /* stfqux */
5350 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
5351 {
5352 int ra = rA(ctx->opcode);
5353 int rd = rD(ctx->opcode);
5354 TCGv t0 = tcg_temp_new();
5355 TCGv t1 = tcg_temp_new();
5356 gen_addr_reg_index(t0, ctx);
5357 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5358 tcg_gen_addi_tl(t1, t0, 8);
5359 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5360 if (ra != 0)
5361 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5362 tcg_temp_free(t0);
5363 tcg_temp_free(t1);
5364 }
5365
5366 /* stfqx */
5367 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
5368 {
5369 int rd = rD(ctx->opcode);
5370 TCGv t0 = tcg_temp_new();
5371 gen_addr_reg_index(t0, ctx);
5372 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5373 tcg_gen_addi_tl(t0, t0, 8);
5374 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5375 tcg_temp_free(t0);
5376 }
5377
5378 /* BookE specific instructions */
5379 /* XXX: not implemented on 440 ? */
5380 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
5381 {
5382 /* XXX: TODO */
5383 GEN_EXCP_INVAL(ctx);
5384 }
5385
5386 /* XXX: not implemented on 440 ? */
5387 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
5388 {
5389 #if defined(CONFIG_USER_ONLY)
5390 GEN_EXCP_PRIVOPC(ctx);
5391 #else
5392 if (unlikely(!ctx->supervisor)) {
5393 GEN_EXCP_PRIVOPC(ctx);
5394 return;
5395 }
5396 gen_addr_reg_index(cpu_T[0], ctx);
5397 /* Use the same micro-ops as for tlbie */
5398 #if defined(TARGET_PPC64)
5399 if (ctx->sf_mode)
5400 gen_op_tlbie_64();
5401 else
5402 #endif
5403 gen_op_tlbie();
5404 #endif
5405 }
5406
5407 /* All 405 MAC instructions are translated here */
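/* The low bits of opc3 select the operand halves and the signedness of the
 * 16x16 multiply; opc2 bit 0x04 requests accumulation into RT and bit 0x02
 * negates the product first; opc3 bit 0x01 selects the signed overflow and
 * saturation handling, bit 0x02 enables saturation, and bit 0x10 updates
 * XER[OV]/XER[SO]. */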
5408 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
5409 int opc2, int opc3,
5410 int ra, int rb, int rt, int Rc)
5411 {
5412 TCGv t0, t1;
5413
5414 t0 = tcg_temp_local_new();
5415 t1 = tcg_temp_local_new();
5416
5417 switch (opc3 & 0x0D) {
5418 case 0x05:
5419 /* macchw - macchw. - macchwo - macchwo. */
5420 /* macchws - macchws. - macchwso - macchwso. */
5421 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5422 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5423 /* mulchw - mulchw. */
5424 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5425 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5426 tcg_gen_ext16s_tl(t1, t1);
5427 break;
5428 case 0x04:
5429 /* macchwu - macchwu. - macchwuo - macchwuo. */
5430 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5431 /* mulchwu - mulchwu. */
5432 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5433 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5434 tcg_gen_ext16u_tl(t1, t1);
5435 break;
5436 case 0x01:
5437 /* machhw - machhw. - machhwo - machhwo. */
5438 /* machhws - machhws. - machhwso - machhwso. */
5439 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5440 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5441 /* mulhhw - mulhhw. */
5442 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5443 tcg_gen_ext16s_tl(t0, t0);
5444 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5445 tcg_gen_ext16s_tl(t1, t1);
5446 break;
5447 case 0x00:
5448 /* machhwu - machhwu. - machhwuo - machhwuo. */
5449 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5450 /* mulhhwu - mulhhwu. */
5451 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5452 tcg_gen_ext16u_tl(t0, t0);
5453 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5454 tcg_gen_ext16u_tl(t1, t1);
5455 break;
5456 case 0x0D:
5457 /* maclhw - maclhw. - maclhwo - maclhwo. */
5458 /* maclhws - maclhws. - maclhwso - maclhwso. */
5459 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5460 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5461 /* mullhw - mullhw. */
5462 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5463 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5464 break;
5465 case 0x0C:
5466 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5467 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5468 /* mullhwu - mullhwu. */
5469 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5470 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5471 break;
5472 }
5473 if (opc2 & 0x04) {
5474 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5475 tcg_gen_mul_tl(t1, t0, t1);
5476 if (opc2 & 0x02) {
5477 /* nmultiply-and-accumulate (0x0E) */
5478 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5479 } else {
5480 /* multiply-and-accumulate (0x0C) */
5481 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5482 }
5483
5484 if (opc3 & 0x12) {
5485 /* Check overflow and/or saturate */
5486 int l1 = gen_new_label();
5487
5488 if (opc3 & 0x10) {
5489 /* Start with XER OV disabled, the most likely case */
5490 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5491 }
5492 if (opc3 & 0x01) {
5493 /* Signed */
5494 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5495 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5496 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5497 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5498 if (opc3 & 0x02) {
5499 /* Saturate */
5500 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5501 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5502 }
5503 } else {
5504 /* Unsigned */
5505 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5506 if (opc3 & 0x02) {
5507 /* Saturate */
5508 tcg_gen_movi_tl(t0, UINT32_MAX);
5509 }
5510 }
5511 if (opc3 & 0x10) {
5512 /* Check overflow */
5513 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5514 }
5515 gen_set_label(l1);
5516 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5517 }
5518 } else {
5519 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5520 }
5521 tcg_temp_free(t0);
5522 tcg_temp_free(t1);
5523 if (unlikely(Rc != 0)) {
5524 /* Update Rc0 */
5525 gen_set_Rc0(ctx, cpu_gpr[rt]);
5526 }
5527 }
5528
5529 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5530 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
5531 { \
5532 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5533 rD(ctx->opcode), Rc(ctx->opcode)); \
5534 }
5535
5536 /* macchw - macchw. */
5537 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5538 /* macchwo - macchwo. */
5539 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5540 /* macchws - macchws. */
5541 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5542 /* macchwso - macchwso. */
5543 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5544 /* macchwsu - macchwsu. */
5545 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5546 /* macchwsuo - macchwsuo. */
5547 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5548 /* macchwu - macchwu. */
5549 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5550 /* macchwuo - macchwuo. */
5551 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5552 /* machhw - machhw. */
5553 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5554 /* machhwo - machhwo. */
5555 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5556 /* machhws - machhws. */
5557 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5558 /* machhwso - machhwso. */
5559 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5560 /* machhwsu - machhwsu. */
5561 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5562 /* machhwsuo - machhwsuo. */
5563 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5564 /* machhwu - machhwu. */
5565 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5566 /* machhwuo - machhwuo. */
5567 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5568 /* maclhw - maclhw. */
5569 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5570 /* maclhwo - maclhwo. */
5571 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5572 /* maclhws - maclhws. */
5573 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5574 /* maclhwso - maclhwso. */
5575 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5576 /* maclhwu - maclhwu. */
5577 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5578 /* maclhwuo - maclhwuo. */
5579 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5580 /* maclhwsu - maclhwsu. */
5581 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5582 /* maclhwsuo - maclhwsuo. */
5583 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5584 /* nmacchw - nmacchw. */
5585 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5586 /* nmacchwo - nmacchwo. */
5587 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5588 /* nmacchws - nmacchws. */
5589 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5590 /* nmacchwso - nmacchwso. */
5591 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5592 /* nmachhw - nmachhw. */
5593 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5594 /* nmachhwo - nmachhwo. */
5595 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5596 /* nmachhws - nmachhws. */
5597 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5598 /* nmachhwso - nmachhwso. */
5599 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5600 /* nmaclhw - nmaclhw. */
5601 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5602 /* nmaclhwo - nmaclhwo. */
5603 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5604 /* nmaclhws - nmaclhws. */
5605 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5606 /* nmaclhwso - nmaclhwso. */
5607 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5608
5609 /* mulchw - mulchw. */
5610 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5611 /* mulchwu - mulchwu. */
5612 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5613 /* mulhhw - mulhhw. */
5614 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5615 /* mulhhwu - mulhhwu. */
5616 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5617 /* mullhw - mullhw. */
5618 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5619 /* mullhwu - mullhwu. */
5620 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5621
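/* Device Control Register (DCR) accesses go through the load_dcr/store_dcr
 * helpers. The DCR number comes from the SPR field of the opcode (or from rA
 * for the indexed forms below), and NIP is updated first so the helper can
 * raise an exception with a usable return address.
 */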
5622 /* mfdcr */
5623 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
5624 {
5625 #if defined(CONFIG_USER_ONLY)
5626 GEN_EXCP_PRIVREG(ctx);
5627 #else
5628 TCGv dcrn;
5629 if (unlikely(!ctx->supervisor)) {
5630 GEN_EXCP_PRIVREG(ctx);
5631 return;
5632 }
5633 /* NIP cannot be restored if the memory exception comes from a helper */
5634 gen_update_nip(ctx, ctx->nip - 4);
5635 dcrn = tcg_const_tl(SPR(ctx->opcode));
5636 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
5637 tcg_temp_free(dcrn);
5638 #endif
5639 }
5640
5641 /* mtdcr */
5642 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
5643 {
5644 #if defined(CONFIG_USER_ONLY)
5645 GEN_EXCP_PRIVREG(ctx);
5646 #else
5647 TCGv dcrn;
5648 if (unlikely(!ctx->supervisor)) {
5649 GEN_EXCP_PRIVREG(ctx);
5650 return;
5651 }
5652 /* NIP cannot be restored if the memory exception comes from a helper */
5653 gen_update_nip(ctx, ctx->nip - 4);
5654 dcrn = tcg_const_tl(SPR(ctx->opcode));
5655 gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
5656 tcg_temp_free(dcrn);
5657 #endif
5658 }
5659
5660 /* mfdcrx */
5661 /* XXX: not implemented on 440 ? */
5662 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
5663 {
5664 #if defined(CONFIG_USER_ONLY)
5665 GEN_EXCP_PRIVREG(ctx);
5666 #else
5667 if (unlikely(!ctx->supervisor)) {
5668 GEN_EXCP_PRIVREG(ctx);
5669 return;
5670 }
5671 /* NIP cannot be restored if the memory exception comes from a helper */
5672 gen_update_nip(ctx, ctx->nip - 4);
5673 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5674 /* Note: if the Rc bit is set, the resulting CR0 (Rc0) state is undefined */
5675 #endif
5676 }
5677
5678 /* mtdcrx */
5679 /* XXX: not implemented on 440 ? */
5680 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
5681 {
5682 #if defined(CONFIG_USER_ONLY)
5683 GEN_EXCP_PRIVREG(ctx);
5684 #else
5685 if (unlikely(!ctx->supervisor)) {
5686 GEN_EXCP_PRIVREG(ctx);
5687 return;
5688 }
5689 /* NIP cannot be restored if the memory exception comes from a helper */
5690 gen_update_nip(ctx, ctx->nip - 4);
5691 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5692 /* Note: if the Rc bit is set, the resulting CR0 (Rc0) state is undefined */
5693 #endif
5694 }
5695
5696 /* mfdcrux (PPC 460) : user-mode access to DCR */
5697 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
5698 {
5699 /* NIP cannot be restored if the memory exception comes from a helper */
5700 gen_update_nip(ctx, ctx->nip - 4);
5701 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5702 /* Note: if the Rc bit is set, the resulting CR0 (Rc0) state is undefined */
5703 }
5704
5705 /* mtdcrux (PPC 460) : user-mode access to DCR */
5706 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
5707 {
5708 /* NIP cannot be restored if the memory exception comes from a helper */
5709 gen_update_nip(ctx, ctx->nip - 4);
5710 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5711 /* Note: if the Rc bit is set, the resulting CR0 (Rc0) state is undefined */
5712 }
5713
5714 /* dccci */
5715 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
5716 {
5717 #if defined(CONFIG_USER_ONLY)
5718 GEN_EXCP_PRIVOPC(ctx);
5719 #else
5720 if (unlikely(!ctx->supervisor)) {
5721 GEN_EXCP_PRIVOPC(ctx);
5722 return;
5723 }
5724 /* interpreted as no-op */
5725 #endif
5726 }
5727
5728 /* dcread */
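/* The data cache itself is not modelled: the word at EA is loaded only so
 * that the usual MMU exceptions can be raised, the loaded value is discarded
 * and rD receives the effective address instead.
 */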
5729 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
5730 {
5731 #if defined(CONFIG_USER_ONLY)
5732 GEN_EXCP_PRIVOPC(ctx);
5733 #else
5734 TCGv EA, val;
5735 if (unlikely(!ctx->supervisor)) {
5736 GEN_EXCP_PRIVOPC(ctx);
5737 return;
5738 }
5739 EA = tcg_temp_new();
5740 gen_set_access_type(ACCESS_CACHE);
5741 gen_addr_reg_index(EA, ctx);
5742 val = tcg_temp_new();
5743 gen_qemu_ld32u(val, EA, ctx->mem_idx);
5744 tcg_temp_free(val);
5745 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5746 tcg_temp_free(EA);
5747 #endif
5748 }
5749
5750 /* icbt */
5751 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
5752 {
5753 /* interpreted as no-op */
5754 /* XXX: the specification says this is treated as a load by the MMU
5755 * but does not generate any exception
5756 */
5757 }
5758
5759 /* iccci */
5760 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
5761 {
5762 #if defined(CONFIG_USER_ONLY)
5763 GEN_EXCP_PRIVOPC(ctx);
5764 #else
5765 if (unlikely(!ctx->supervisor)) {
5766 GEN_EXCP_PRIVOPC(ctx);
5767 return;
5768 }
5769 /* interpreted as no-op */
5770 #endif
5771 }
5772
5773 /* icread */
5774 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
5775 {
5776 #if defined(CONFIG_USER_ONLY)
5777 GEN_EXCP_PRIVOPC(ctx);
5778 #else
5779 if (unlikely(!ctx->supervisor)) {
5780 GEN_EXCP_PRIVOPC(ctx);
5781 return;
5782 }
5783 /* interpreted as no-op */
5784 #endif
5785 }
5786
5787 /* rfci (supervisor only) */
5788 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
5789 {
5790 #if defined(CONFIG_USER_ONLY)
5791 GEN_EXCP_PRIVOPC(ctx);
5792 #else
5793 if (unlikely(!ctx->supervisor)) {
5794 GEN_EXCP_PRIVOPC(ctx);
5795 return;
5796 }
5797 /* Restore CPU state */
5798 gen_helper_40x_rfci();
5799 GEN_SYNC(ctx);
5800 #endif
5801 }
5802
5803 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
5804 {
5805 #if defined(CONFIG_USER_ONLY)
5806 GEN_EXCP_PRIVOPC(ctx);
5807 #else
5808 if (unlikely(!ctx->supervisor)) {
5809 GEN_EXCP_PRIVOPC(ctx);
5810 return;
5811 }
5812 /* Restore CPU state */
5813 gen_helper_rfci();
5814 GEN_SYNC(ctx);
5815 #endif
5816 }
5817
5818 /* BookE specific */
5819 /* XXX: not implemented on 440 ? */
5820 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
5821 {
5822 #if defined(CONFIG_USER_ONLY)
5823 GEN_EXCP_PRIVOPC(ctx);
5824 #else
5825 if (unlikely(!ctx->supervisor)) {
5826 GEN_EXCP_PRIVOPC(ctx);
5827 return;
5828 }
5829 /* Restore CPU state */
5830 gen_helper_rfdi();
5831 GEN_SYNC(ctx);
5832 #endif
5833 }
5834
5835 /* XXX: not implemented on 440 ? */
5836 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5837 {
5838 #if defined(CONFIG_USER_ONLY)
5839 GEN_EXCP_PRIVOPC(ctx);
5840 #else
5841 if (unlikely(!ctx->supervisor)) {
5842 GEN_EXCP_PRIVOPC(ctx);
5843 return;
5844 }
5845 /* Restore CPU state */
5846 gen_helper_rfmci();
5847 GEN_SYNC(ctx);
5848 #endif
5849 }
5850
5851 /* TLB management - PowerPC 405 implementation */
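/* These 40x/440 TLB handlers still go through the cpu_T temporaries and the
 * gen_op_4xx_* and gen_op_440_* micro-ops rather than TCG helpers.
 */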
5852 /* tlbre */
5853 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5854 {
5855 #if defined(CONFIG_USER_ONLY)
5856 GEN_EXCP_PRIVOPC(ctx);
5857 #else
5858 if (unlikely(!ctx->supervisor)) {
5859 GEN_EXCP_PRIVOPC(ctx);
5860 return;
5861 }
5862 switch (rB(ctx->opcode)) {
5863 case 0:
5864 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5865 gen_op_4xx_tlbre_hi();
5866 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5867 break;
5868 case 1:
5869 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5870 gen_op_4xx_tlbre_lo();
5871 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5872 break;
5873 default:
5874 GEN_EXCP_INVAL(ctx);
5875 break;
5876 }
5877 #endif
5878 }
5879
5880 /* tlbsx - tlbsx. */
5881 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5882 {
5883 #if defined(CONFIG_USER_ONLY)
5884 GEN_EXCP_PRIVOPC(ctx);
5885 #else
5886 if (unlikely(!ctx->supervisor)) {
5887 GEN_EXCP_PRIVOPC(ctx);
5888 return;
5889 }
5890 gen_addr_reg_index(cpu_T[0], ctx);
5891 gen_op_4xx_tlbsx();
5892 if (Rc(ctx->opcode))
5893 gen_op_4xx_tlbsx_check();
5894 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5895 #endif
5896 }
5897
5898 /* tlbwe */
5899 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5900 {
5901 #if defined(CONFIG_USER_ONLY)
5902 GEN_EXCP_PRIVOPC(ctx);
5903 #else
5904 if (unlikely(!ctx->supervisor)) {
5905 GEN_EXCP_PRIVOPC(ctx);
5906 return;
5907 }
5908 switch (rB(ctx->opcode)) {
5909 case 0:
5910 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5911 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5912 gen_op_4xx_tlbwe_hi();
5913 break;
5914 case 1:
5915 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5916 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5917 gen_op_4xx_tlbwe_lo();
5918 break;
5919 default:
5920 GEN_EXCP_INVAL(ctx);
5921 break;
5922 }
5923 #endif
5924 }
5925
5926 /* TLB management - PowerPC 440 implementation */
5927 /* tlbre */
5928 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5929 {
5930 #if defined(CONFIG_USER_ONLY)
5931 GEN_EXCP_PRIVOPC(ctx);
5932 #else
5933 if (unlikely(!ctx->supervisor)) {
5934 GEN_EXCP_PRIVOPC(ctx);
5935 return;
5936 }
5937 switch (rB(ctx->opcode)) {
5938 case 0:
5939 case 1:
5940 case 2:
5941 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5942 gen_op_440_tlbre(rB(ctx->opcode));
5943 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5944 break;
5945 default:
5946 GEN_EXCP_INVAL(ctx);
5947 break;
5948 }
5949 #endif
5950 }
5951
5952 /* tlbsx - tlbsx. */
5953 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5954 {
5955 #if defined(CONFIG_USER_ONLY)
5956 GEN_EXCP_PRIVOPC(ctx);
5957 #else
5958 if (unlikely(!ctx->supervisor)) {
5959 GEN_EXCP_PRIVOPC(ctx);
5960 return;
5961 }
5962 gen_addr_reg_index(cpu_T[0], ctx);
5963 gen_op_440_tlbsx();
5964 if (Rc(ctx->opcode))
5965 gen_op_4xx_tlbsx_check();
5966 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5967 #endif
5968 }
5969
5970 /* tlbwe */
5971 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
5972 {
5973 #if defined(CONFIG_USER_ONLY)
5974 GEN_EXCP_PRIVOPC(ctx);
5975 #else
5976 if (unlikely(!ctx->supervisor)) {
5977 GEN_EXCP_PRIVOPC(ctx);
5978 return;
5979 }
5980 switch (rB(ctx->opcode)) {
5981 case 0:
5982 case 1:
5983 case 2:
5984 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5985 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5986 gen_op_440_tlbwe(rB(ctx->opcode));
5987 break;
5988 default:
5989 GEN_EXCP_INVAL(ctx);
5990 break;
5991 }
5992 #endif
5993 }
5994
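/* wrtee copies the EE bit of the source register into MSR[EE]; wrteei uses
 * the bit encoded in the opcode. Translation is stopped whenever EE may have
 * been set, so that a pending interrupt can be taken before the next
 * instruction.
 */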
5995 /* wrtee */
5996 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
5997 {
5998 #if defined(CONFIG_USER_ONLY)
5999 GEN_EXCP_PRIVOPC(ctx);
6000 #else
6001 TCGv t0;
6002 if (unlikely(!ctx->supervisor)) {
6003 GEN_EXCP_PRIVOPC(ctx);
6004 return;
6005 }
6006 t0 = tcg_temp_new();
6007 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6008 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6009 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6010 tcg_temp_free(t0);
6011 /* Stop translation to have a chance to raise an exception
6012 * if we just set msr_ee to 1
6013 */
6014 GEN_STOP(ctx);
6015 #endif
6016 }
6017
6018 /* wrteei */
6019 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
6020 {
6021 #if defined(CONFIG_USER_ONLY)
6022 GEN_EXCP_PRIVOPC(ctx);
6023 #else
6024 if (unlikely(!ctx->supervisor)) {
6025 GEN_EXCP_PRIVOPC(ctx);
6026 return;
6027 }
6028 if (ctx->opcode & 0x00010000) {
6029 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6030 /* Stop translation to have a chance to raise an exception */
6031 GEN_STOP(ctx);
6032 } else {
6033 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6034 }
6035 #endif
6036 }
6037
6038 /* PowerPC 440 specific instructions */
6039 /* dlmzb */
6040 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
6041 {
6042 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6043 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6044 cpu_gpr[rB(ctx->opcode)], t0);
6045 tcg_temp_free_i32(t0);
6046 }
6047
6048 /* mbar replaces eieio on 440 */
6049 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
6050 {
6051 /* interpreted as no-op */
6052 }
6053
6054 /* msync replaces sync on 440 */
6055 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
6056 {
6057 /* interpreted as no-op */
6058 }
6059
6060 /* icbt */
6061 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
6062 {
6063 /* interpreted as no-op */
6064 /* XXX: the specification says this is treated as a load by the MMU
6065 * but does not generate any exception
6066 */
6067 }
6068
6069 /*** Altivec vector extension ***/
6070 /* Altivec registers moves */
6071
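/* lvx/stvx force 16-byte alignment by clearing the low 4 bits of EA and move
 * the vector as two 64-bit halves (cpu_avrh/cpu_avrl); the order of the two
 * accesses is chosen from the low bit of ctx->mem_idx.
 */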
6072 #define GEN_VR_LDX(name, opc2, opc3) \
6073 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
6074 { \
6075 TCGv EA; \
6076 if (unlikely(!ctx->altivec_enabled)) { \
6077 GEN_EXCP_NO_VR(ctx); \
6078 return; \
6079 } \
6080 EA = tcg_temp_new(); \
6081 gen_addr_reg_index(EA, ctx); \
6082 tcg_gen_andi_tl(EA, EA, ~0xf); \
6083 if (ctx->mem_idx & 1) { \
6084 gen_qemu_ld64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6085 tcg_gen_addi_tl(EA, EA, 8); \
6086 gen_qemu_ld64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6087 } else { \
6088 gen_qemu_ld64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6089 tcg_gen_addi_tl(EA, EA, 8); \
6090 gen_qemu_ld64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6091 } \
6092 tcg_temp_free(EA); \
6093 }
6094
6095 #define GEN_VR_STX(name, opc2, opc3) \
6096 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
6097 { \
6098 TCGv EA; \
6099 if (unlikely(!ctx->altivec_enabled)) { \
6100 GEN_EXCP_NO_VR(ctx); \
6101 return; \
6102 } \
6103 EA = tcg_temp_new(); \
6104 gen_addr_reg_index(EA, ctx); \
6105 tcg_gen_andi_tl(EA, EA, ~0xf); \
6106 if (ctx->mem_idx & 1) { \
6107 gen_qemu_st64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6108 tcg_gen_addi_tl(EA, EA, 8); \
6109 gen_qemu_st64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6110 } else { \
6111 gen_qemu_st64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6112 tcg_gen_addi_tl(EA, EA, 8); \
6113 gen_qemu_st64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6114 } \
6115 tcg_temp_free(EA); \
6116 }
6117
6118 GEN_VR_LDX(lvx, 0x07, 0x03);
6119 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6120 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6121
6122 GEN_VR_STX(svx, 0x07, 0x07);
6123 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6124 GEN_VR_STX(svxl, 0x07, 0x0F);
6125
6126 /*** SPE extension ***/
6127 /* Register moves */
6128
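/* An SPE "64-bit GPR" is the full GPR on 64-bit targets; on 32-bit targets
 * it is split across cpu_gpr (low word) and cpu_gprh (high word), so these
 * helpers pack and unpack the two halves.
 */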
6129 static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
6130 #if defined(TARGET_PPC64)
6131 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6132 #else
6133 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6134 #endif
6135 }
6136
6137 static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) {
6138 #if defined(TARGET_PPC64)
6139 tcg_gen_mov_i64(cpu_gpr[reg], t);
6140 #else
6141 TCGv_i64 tmp = tcg_temp_new_i64();
6142 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6143 tcg_gen_shri_i64(tmp, t, 32);
6144 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6145 tcg_temp_free_i64(tmp);
6146 #endif
6147 }
6148
6149 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
6150 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
6151 { \
6152 if (Rc(ctx->opcode)) \
6153 gen_##name1(ctx); \
6154 else \
6155 gen_##name0(ctx); \
6156 }
6157
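/* Each GEN_SPE entry covers a pair of opcodes that share an encoding: the Rc
 * bit of the opcode selects between the two sub-operations name0 and name1.
 */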
6158 /* Handler for undefined SPE opcodes */
6159 static always_inline void gen_speundef (DisasContext *ctx)
6160 {
6161 GEN_EXCP_INVAL(ctx);
6162 }
6163
6164 /* SPE logic */
6165 #if defined(TARGET_PPC64)
6166 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6167 static always_inline void gen_##name (DisasContext *ctx) \
6168 { \
6169 if (unlikely(!ctx->spe_enabled)) { \
6170 GEN_EXCP_NO_AP(ctx); \
6171 return; \
6172 } \
6173 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6174 cpu_gpr[rB(ctx->opcode)]); \
6175 }
6176 #else
6177 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6178 static always_inline void gen_##name (DisasContext *ctx) \
6179 { \
6180 if (unlikely(!ctx->spe_enabled)) { \
6181 GEN_EXCP_NO_AP(ctx); \
6182 return; \
6183 } \
6184 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6185 cpu_gpr[rB(ctx->opcode)]); \
6186 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6187 cpu_gprh[rB(ctx->opcode)]); \
6188 }
6189 #endif
6190
6191 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6192 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6193 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6194 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6195 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6196 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6197 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6198 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6199
6200 /* SPE logic immediate */
6201 #if defined(TARGET_PPC64)
6202 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6203 static always_inline void gen_##name (DisasContext *ctx) \
6204 { \
6205 if (unlikely(!ctx->spe_enabled)) { \
6206 GEN_EXCP_NO_AP(ctx); \
6207 return; \
6208 } \
6209 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6210 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6211 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6212 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6213 tcg_opi(t0, t0, rB(ctx->opcode)); \
6214 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6215 tcg_gen_trunc_i64_i32(t1, t2); \
6216 tcg_temp_free_i64(t2); \
6217 tcg_opi(t1, t1, rB(ctx->opcode)); \
6218 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6219 tcg_temp_free_i32(t0); \
6220 tcg_temp_free_i32(t1); \
6221 }
6222 #else
6223 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6224 static always_inline void gen_##name (DisasContext *ctx) \
6225 { \
6226 if (unlikely(!ctx->spe_enabled)) { \
6227 GEN_EXCP_NO_AP(ctx); \
6228 return; \
6229 } \
6230 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6231 rB(ctx->opcode)); \
6232 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6233 rB(ctx->opcode)); \
6234 }
6235 #endif
6236 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6237 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6238 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6239 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6240
6241 /* SPE arithmetic */
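/* These operate on a vector of two 32-bit words: the operation is applied
 * independently to both halves, either on the cpu_gpr/cpu_gprh pair (32-bit
 * targets) or by splitting and re-concatenating the 64-bit GPR.
 */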
6242 #if defined(TARGET_PPC64)
6243 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6244 static always_inline void gen_##name (DisasContext *ctx) \
6245 { \
6246 if (unlikely(!ctx->spe_enabled)) { \
6247 GEN_EXCP_NO_AP(ctx); \
6248 return; \
6249 } \
6250 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6251 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6252 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6253 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6254 tcg_op(t0, t0); \
6255 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6256 tcg_gen_trunc_i64_i32(t1, t2); \
6257 tcg_temp_free_i64(t2); \
6258 tcg_op(t1, t1); \
6259 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6260 tcg_temp_free_i32(t0); \
6261 tcg_temp_free_i32(t1); \
6262 }
6263 #else
6264 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6265 static always_inline void gen_##name (DisasContext *ctx) \
6266 { \
6267 if (unlikely(!ctx->spe_enabled)) { \
6268 GEN_EXCP_NO_AP(ctx); \
6269 return; \
6270 } \
6271 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6272 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6273 }
6274 #endif
6275
6276 static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
6277 {
6278 int l1 = gen_new_label();
6279 int l2 = gen_new_label();
6280
6281 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6282 tcg_gen_neg_i32(ret, arg1);
6283 tcg_gen_br(l2);
6284 gen_set_label(l1);
6285 tcg_gen_mov_i32(ret, arg1);
6286 gen_set_label(l2);
6287 }
6288 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6289 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6290 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6291 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6292 static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
6293 {
6294 tcg_gen_addi_i32(ret, arg1, 0x8000);
6295 tcg_gen_ext16u_i32(ret, ret);
6296 }
6297 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6298 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6299 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6300
6301 #if defined(TARGET_PPC64)
6302 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6303 static always_inline void gen_##name (DisasContext *ctx) \
6304 { \
6305 if (unlikely(!ctx->spe_enabled)) { \
6306 GEN_EXCP_NO_AP(ctx); \
6307 return; \
6308 } \
6309 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6310 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6311 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
6312 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
6313 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6314 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
6315 tcg_op(t0, t0, t2); \
6316 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
6317 tcg_gen_trunc_i64_i32(t1, t3); \
6318 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
6319 tcg_gen_trunc_i64_i32(t2, t3); \
6320 tcg_temp_free_i64(t3); \
6321 tcg_op(t1, t1, t2); \
6322 tcg_temp_free_i32(t2); \
6323 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6324 tcg_temp_free_i32(t0); \
6325 tcg_temp_free_i32(t1); \
6326 }
6327 #else
6328 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6329 static always_inline void gen_##name (DisasContext *ctx) \
6330 { \
6331 if (unlikely(!ctx->spe_enabled)) { \
6332 GEN_EXCP_NO_AP(ctx); \
6333 return; \
6334 } \
6335 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6336 cpu_gpr[rB(ctx->opcode)]); \
6337 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6338 cpu_gprh[rB(ctx->opcode)]); \
6339 }
6340 #endif
6341
6342 static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6343 {
6344 TCGv_i32 t0;
6345 int l1, l2;
6346
6347 l1 = gen_new_label();
6348 l2 = gen_new_label();
6349 t0 = tcg_temp_local_new_i32();
6350 /* No error here: 6 bits are used */
6351 tcg_gen_andi_i32(t0, arg2, 0x3F);
6352 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6353 tcg_gen_shr_i32(ret, arg1, t0);
6354 tcg_gen_br(l2);
6355 gen_set_label(l1);
6356 tcg_gen_movi_i32(ret, 0);
6357 gen_set_label(l2);
6358 tcg_temp_free_i32(t0);
6359 }
6360 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6361 static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6362 {
6363 TCGv_i32 t0;
6364 int l1, l2;
6365
6366 l1 = gen_new_label();
6367 l2 = gen_new_label();
6368 t0 = tcg_temp_local_new_i32();
6369 /* No error here: 6 bits are used */
6370 tcg_gen_andi_i32(t0, arg2, 0x3F);
6371 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6372 tcg_gen_sar_i32(ret, arg1, t0);
6373 tcg_gen_br(l2);
6374 gen_set_label(l1);
6375 tcg_gen_movi_i32(ret, 0);
6376 gen_set_label(l2);
6377 tcg_temp_free_i32(t0);
6378 }
6379 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6380 static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6381 {
6382 TCGv_i32 t0;
6383 int l1, l2;
6384
6385 l1 = gen_new_label();
6386 l2 = gen_new_label();
6387 t0 = tcg_temp_local_new_i32();
6388 /* No error here: 6 bits are used */
6389 tcg_gen_andi_i32(t0, arg2, 0x3F);
6390 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6391 tcg_gen_shl_i32(ret, arg1, t0);
6392 tcg_gen_br(l2);
6393 gen_set_label(l1);
6394 tcg_gen_movi_i32(ret, 0);
6395 gen_set_label(l2);
6396 tcg_temp_free_i32(t0);
6397 }
6398 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6399 static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6400 {
6401 TCGv_i32 t0 = tcg_temp_new_i32();
6402 tcg_gen_andi_i32(t0, arg2, 0x1F);
6403 tcg_gen_rotl_i32(ret, arg1, t0);
6404 tcg_temp_free_i32(t0);
6405 }
6406 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6407 static always_inline void gen_evmergehi (DisasContext *ctx)
6408 {
6409 if (unlikely(!ctx->spe_enabled)) {
6410 GEN_EXCP_NO_AP(ctx);
6411 return;
6412 }
6413 #if defined(TARGET_PPC64)
6414 TCGv t0 = tcg_temp_new();
6415 TCGv t1 = tcg_temp_new();
6416 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6417 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6418 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6419 tcg_temp_free(t0);
6420 tcg_temp_free(t1);
6421 #else
6422 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6423 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6424 #endif
6425 }
6426 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6427 static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6428 {
6429 tcg_gen_sub_i32(ret, arg2, arg1);
6430 }
6431 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6432
6433 /* SPE arithmetic immediate */
6434 #if defined(TARGET_PPC64)
6435 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6436 static always_inline void gen_##name (DisasContext *ctx) \
6437 { \
6438 if (unlikely(!ctx->spe_enabled)) { \
6439 GEN_EXCP_NO_AP(ctx); \
6440 return; \
6441 } \
6442 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6443 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6444 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6445 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
6446 tcg_op(t0, t0, rA(ctx->opcode)); \
6447 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6448 tcg_gen_trunc_i64_i32(t1, t2); \
6449 tcg_temp_free_i64(t2); \
6450 tcg_op(t1, t1, rA(ctx->opcode)); \
6451 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6452 tcg_temp_free_i32(t0); \
6453 tcg_temp_free_i32(t1); \
6454 }
6455 #else
6456 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6457 static always_inline void gen_##name (DisasContext *ctx) \
6458 { \
6459 if (unlikely(!ctx->spe_enabled)) { \
6460 GEN_EXCP_NO_AP(ctx); \
6461 return; \
6462 } \
6463 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
6464 rA(ctx->opcode)); \
6465 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
6466 rA(ctx->opcode)); \
6467 }
6468 #endif
6469 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
6470 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
6471
6472 /* SPE comparison */
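/* Both word halves are compared: CRF_CL reflects the low-word result, CRF_CH
 * the high-word result, and the CH_OR_CL / CH_AND_CL bits keep the OR and
 * AND of the two, all within the CR field selected by crfD.
 */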
6473 #if defined(TARGET_PPC64)
6474 #define GEN_SPEOP_COMP(name, tcg_cond) \
6475 static always_inline void gen_##name (DisasContext *ctx) \
6476 { \
6477 if (unlikely(!ctx->spe_enabled)) { \
6478 GEN_EXCP_NO_AP(ctx); \
6479 return; \
6480 } \
6481 int l1 = gen_new_label(); \
6482 int l2 = gen_new_label(); \
6483 int l3 = gen_new_label(); \
6484 int l4 = gen_new_label(); \
6485 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6486 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6487 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6488 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6489 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
6490 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
6491 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
6492 tcg_gen_br(l2); \
6493 gen_set_label(l1); \
6494 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
6495 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
6496 gen_set_label(l2); \
6497 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6498 tcg_gen_trunc_i64_i32(t0, t2); \
6499 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6500 tcg_gen_trunc_i64_i32(t1, t2); \
6501 tcg_temp_free_i64(t2); \
6502 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
6503 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6504 ~(CRF_CH | CRF_CH_AND_CL)); \
6505 tcg_gen_br(l4); \
6506 gen_set_label(l3); \
6507 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6508 CRF_CH | CRF_CH_OR_CL); \
6509 gen_set_label(l4); \
6510 tcg_temp_free_i32(t0); \
6511 tcg_temp_free_i32(t1); \
6512 }
6513 #else
6514 #define GEN_SPEOP_COMP(name, tcg_cond) \
6515 static always_inline void gen_##name (DisasContext *ctx) \
6516 { \
6517 if (unlikely(!ctx->spe_enabled)) { \
6518 GEN_EXCP_NO_AP(ctx); \
6519 return; \
6520 } \
6521 int l1 = gen_new_label(); \
6522 int l2 = gen_new_label(); \
6523 int l3 = gen_new_label(); \
6524 int l4 = gen_new_label(); \
6525 \
6526 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
6527 cpu_gpr[rB(ctx->opcode)], l1); \
6528 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
6529 tcg_gen_br(l2); \
6530 gen_set_label(l1); \
6531 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
6532 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
6533 gen_set_label(l2); \
6534 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
6535 cpu_gprh[rB(ctx->opcode)], l3); \
6536 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6537 ~(CRF_CH | CRF_CH_AND_CL)); \
6538 tcg_gen_br(l4); \
6539 gen_set_label(l3); \
6540 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6541 CRF_CH | CRF_CH_OR_CL); \
6542 gen_set_label(l4); \
6543 }
6544 #endif
6545 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
6546 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
6547 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
6548 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
6549 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
6550
6551 /* SPE misc */
6552 static always_inline void gen_brinc (DisasContext *ctx)
6553 {
6554 /* Note: brinc is usable even if SPE is disabled */
6555 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
6556 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6557 }
6558 static always_inline void gen_evmergelo (DisasContext *ctx)
6559 {
6560 if (unlikely(!ctx->spe_enabled)) {
6561 GEN_EXCP_NO_AP(ctx);
6562 return;
6563 }
6564 #if defined(TARGET_PPC64)
6565 TCGv t0 = tcg_temp_new();
6566 TCGv t1 = tcg_temp_new();
6567 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6568 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
6569 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6570 tcg_temp_free(t0);
6571 tcg_temp_free(t1);
6572 #else
6573 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6574 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6575 #endif
6576 }
6577 static always_inline void gen_evmergehilo (DisasContext *ctx)
6578 {
6579 if (unlikely(!ctx->spe_enabled)) {
6580 GEN_EXCP_NO_AP(ctx);
6581 return;
6582 }
6583 #if defined(TARGET_PPC64)
6584 TCGv t0 = tcg_temp_new();
6585 TCGv t1 = tcg_temp_new();
6586 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6587 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6588 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6589 tcg_temp_free(t0);
6590 tcg_temp_free(t1);
6591 #else
6592 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6593 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6594 #endif
6595 }
6596 static always_inline void gen_evmergelohi (DisasContext *ctx)
6597 {
6598 if (unlikely(!ctx->spe_enabled)) {
6599 GEN_EXCP_NO_AP(ctx);
6600 return;
6601 }
6602 #if defined(TARGET_PPC64)
6603 TCGv t0 = tcg_temp_new();
6604 TCGv t1 = tcg_temp_new();
6605 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6606 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
6607 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6608 tcg_temp_free(t0);
6609 tcg_temp_free(t1);
6610 #else
6611 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6612 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6613 #endif
6614 }
6615 static always_inline void gen_evsplati (DisasContext *ctx)
6616 {
6617 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
6618
6619 #if defined(TARGET_PPC64)
6620 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
6621 #else
6622 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6623 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
6624 #endif
6625 }
6626 static always_inline void gen_evsplatfi (DisasContext *ctx)
6627 {
6628 uint64_t imm = rA(ctx->opcode) << 27;
6629
6630 #if defined(TARGET_PPC64)
6631 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
6632 #else
6633 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6634 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
6635 #endif
6636 }
6637
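/* evsel builds rD from halves of rA and rB: bit 3 of the CR field selected
 * by the low opcode bits picks the source of the high word (rA if set, rB if
 * clear), and bit 2 picks the source of the low word.
 */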
6638 static always_inline void gen_evsel (DisasContext *ctx)
6639 {
6640 int l1 = gen_new_label();
6641 int l2 = gen_new_label();
6642 int l3 = gen_new_label();
6643 int l4 = gen_new_label();
6644 TCGv_i32 t0 = tcg_temp_local_new_i32();
6645 #if defined(TARGET_PPC64)
6646 TCGv t1 = tcg_temp_local_new();
6647 TCGv t2 = tcg_temp_local_new();
6648 #endif
6649 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
6650 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
6651 #if defined(TARGET_PPC64)
6652 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6653 #else
6654 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6655 #endif
6656 tcg_gen_br(l2);
6657 gen_set_label(l1);
6658 #if defined(TARGET_PPC64)
6659 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6660 #else
6661 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6662 #endif
6663 gen_set_label(l2);
6664 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
6665 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
6666 #if defined(TARGET_PPC64)
6667 tcg_gen_andi_tl(t2, cpu_gpr[rA(ctx->opcode)], 0x00000000FFFFFFFFULL);
6668 #else
6669 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6670 #endif
6671 tcg_gen_br(l4);
6672 gen_set_label(l3);
6673 #if defined(TARGET_PPC64)
6674 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFULL);
6675 #else
6676 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6677 #endif
6678 gen_set_label(l4);
6679 tcg_temp_free_i32(t0);
6680 #if defined(TARGET_PPC64)
6681 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
6682 tcg_temp_free(t1);
6683 tcg_temp_free(t2);
6684 #endif
6685 }
6686 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
6687 {
6688 gen_evsel(ctx);
6689 }
6690 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
6691 {
6692 gen_evsel(ctx);
6693 }
6694 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
6695 {
6696 gen_evsel(ctx);
6697 }
6698 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
6699 {
6700 gen_evsel(ctx);
6701 }
6702
6703 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
6704 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
6705 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
6706 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
6707 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
6708 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
6709 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
6710 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
6711 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
6712 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
6713 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
6714 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
6715 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
6716 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
6717 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
6718 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
6719 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
6720 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
6721 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
6722 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
6723 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
6724 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
6725 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
6726 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
6727 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
6728
6729 /* SPE load and stores */
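/* For the immediate-indexed forms the rB field is reused as a 5-bit unsigned
 * offset, scaled by the access size (sh = log2 of the element size); rA == 0
 * means a zero base register.
 */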
6730 static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh)
6731 {
6732 target_ulong uimm = rB(ctx->opcode);
6733
6734 if (rA(ctx->opcode) == 0)
6735 tcg_gen_movi_tl(EA, uimm << sh);
6736 else
6737 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
6738 }
6739
6740 static always_inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
6741 {
6742 #if defined(TARGET_PPC64)
6743 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6744 #else
6745 TCGv_i64 t0 = tcg_temp_new_i64();
6746 gen_qemu_ld64(t0, addr, ctx->mem_idx);
6747 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
6748 tcg_gen_shri_i64(t0, t0, 32);
6749 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
6750 tcg_temp_free_i64(t0);
6751 #endif
6752 }
6753
6754 static always_inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
6755 {
6756 #if defined(TARGET_PPC64)
6757 TCGv t0 = tcg_temp_new();
6758 gen_qemu_ld32u(t0, addr, ctx->mem_idx);
6759 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6760 tcg_gen_addi_tl(addr, addr, 4);
6761 gen_qemu_ld32u(t0, addr, ctx->mem_idx);
6762 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6763 tcg_temp_free(t0);
6764 #else
6765 gen_qemu_ld32u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
6766 tcg_gen_addi_tl(addr, addr, 4);
6767 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6768 #endif
6769 }
6770
6771 static always_inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
6772 {
6773 TCGv t0 = tcg_temp_new();
6774 #if defined(TARGET_PPC64)
6775 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6776 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6777 tcg_gen_addi_tl(addr, addr, 2);
6778 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6779 tcg_gen_shli_tl(t0, t0, 32);
6780 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6781 tcg_gen_addi_tl(addr, addr, 2);
6782 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6783 tcg_gen_shli_tl(t0, t0, 16);
6784 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6785 tcg_gen_addi_tl(addr, addr, 2);
6786 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6787 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6788 #else
6789 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6790 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6791 tcg_gen_addi_tl(addr, addr, 2);
6792 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6793 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
6794 tcg_gen_addi_tl(addr, addr, 2);
6795 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6796 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
6797 tcg_gen_addi_tl(addr, addr, 2);
6798 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6799 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6800 #endif
6801 tcg_temp_free(t0);
6802 }
6803
6804 static always_inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
6805 {
6806 TCGv t0 = tcg_temp_new();
6807 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6808 #if defined(TARGET_PPC64)
6809 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6810 tcg_gen_shli_tl(t0, t0, 16);
6811 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6812 #else
6813 tcg_gen_shli_tl(t0, t0, 16);
6814 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6815 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6816 #endif
6817 tcg_temp_free(t0);
6818 }
6819
6820 static always_inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
6821 {
6822 TCGv t0 = tcg_temp_new();
6823 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6824 #if defined(TARGET_PPC64)
6825 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6826 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6827 #else
6828 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6829 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6830 #endif
6831 tcg_temp_free(t0);
6832 }
6833
6834 static always_inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
6835 {
6836 TCGv t0 = tcg_temp_new();
6837 gen_qemu_ld16s(t0, addr, ctx->mem_idx);
6838 #if defined(TARGET_PPC64)
6839 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6840 tcg_gen_ext32u_tl(t0, t0);
6841 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6842 #else
6843 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6844 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6845 #endif
6846 tcg_temp_free(t0);
6847 }
6848
6849 static always_inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
6850 {
6851 TCGv t0 = tcg_temp_new();
6852 #if defined(TARGET_PPC64)
6853 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6854 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
tcg_gen_addi_tl(addr, addr, 2);
6855 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6856 tcg_gen_shli_tl(t0, t0, 16);
6857 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6858 #else
6859 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6860 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6861 tcg_gen_addi_tl(addr, addr, 2);
6862 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6863 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
6864 #endif
6865 tcg_temp_free(t0);
6866 }
6867
6868 static always_inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
6869 {
6870 #if defined(TARGET_PPC64)
6871 TCGv t0 = tcg_temp_new();
6872 gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6873 tcg_gen_addi_tl(addr, addr, 2);
6874 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6875 tcg_gen_shli_tl(t0, t0, 32);
6876 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6877 tcg_temp_free(t0);
6878 #else
6879 gen_qemu_ld16u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
6880 tcg_gen_addi_tl(addr, addr, 2);
6881 gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6882 #endif
6883 }
6884
6885 static always_inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
6886 {
6887 #if defined(TARGET_PPC64)
6888 TCGv t0 = tcg_temp_new();
6889 gen_qemu_ld16s(t0, addr, ctx->mem_idx);
6890 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
6891 tcg_gen_addi_tl(addr, addr, 2);
6892 gen_qemu_ld16s(t0, addr, ctx->mem_idx);
6893 tcg_gen_shli_tl(t0, t0, 32);
6894 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6895 tcg_temp_free(t0);
6896 #else
6897 gen_qemu_ld16s(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
6898 tcg_gen_addi_tl(addr, addr, 2);
6899 gen_qemu_ld16s(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6900 #endif
6901 }
6902
6903 static always_inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
6904 {
6905 TCGv t0 = tcg_temp_new();
6906 gen_qemu_ld32u(t0, addr, ctx->mem_idx);
6907 #if defined(TARGET_PPC64)
6908 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
6909 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6910 #else
6911 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
6912 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6913 #endif
6914 tcg_temp_free(t0);
6915 }
6916
6917 static always_inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
6918 {
6919 TCGv t0 = tcg_temp_new();
6920 #if defined(TARGET_PPC64)
6921 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6922 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6923 tcg_gen_shli_tl(t0, t0, 32);
6924 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6925 tcg_gen_addi_tl(addr, addr, 2);
6926 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6927 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6928 tcg_gen_shli_tl(t0, t0, 16);
6929 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6930 #else
6931 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6932 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6933 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
6934 tcg_gen_addi_tl(addr, addr, 2);
6935 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6936 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
6937 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6938 #endif
6939 tcg_temp_free(t0);
6940 }
6941
6942 static always_inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
6943 {
6944 #if defined(TARGET_PPC64)
6945 gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
6946 #else
6947 TCGv_i64 t0 = tcg_temp_new_i64();
6948 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
6949 gen_qemu_st64(t0, addr, ctx->mem_idx);
6950 tcg_temp_free_i64(t0);
6951 #endif
6952 }
6953
6954 static always_inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
6955 {
6956 #if defined(TARGET_PPC64)
6957 TCGv t0 = tcg_temp_new();
6958 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
6959 gen_qemu_st32(t0, addr, ctx->mem_idx);
6960 tcg_temp_free(t0);
6961 #else
6962 gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
6963 #endif
6964 tcg_gen_addi_tl(addr, addr, 4);
6965 gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
6966 }
6967
6968 static always_inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
6969 {
6970 TCGv t0 = tcg_temp_new();
6971 #if defined(TARGET_PPC64)
6972 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
6973 #else
6974 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
6975 #endif
6976 gen_qemu_st16(t0, addr, ctx->mem_idx);
6977 tcg_gen_addi_tl(addr, addr, 2);
6978 #if defined(TARGET_PPC64)
6979 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
6980 gen_qemu_st16(t0, addr, ctx->mem_idx);
6981 #else
6982 gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
6983 #endif
6984 tcg_gen_addi_tl(addr, addr, 2);
6985 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
6986 gen_qemu_st16(t0, addr, ctx->mem_idx);
6987 tcg_temp_free(t0);
6988 tcg_gen_addi_tl(addr, addr, 2);
6989 gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
6990 }
6991
6992 static always_inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
6993 {
6994 TCGv t0 = tcg_temp_new();
6995 #if defined(TARGET_PPC64)
6996 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
6997 #else
6998 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
6999 #endif
7000 gen_qemu_st16(t0, addr, ctx->mem_idx);
7001 tcg_gen_addi_tl(addr, addr, 2);
7002 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7003 gen_qemu_st16(t0, addr, ctx->mem_idx);
7004 tcg_temp_free(t0);
7005 }
7006
7007 static always_inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7008 {
7009 #if defined(TARGET_PPC64)
7010 TCGv t0 = tcg_temp_new();
7011 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7012 gen_qemu_st16(t0, addr, ctx->mem_idx);
7013 tcg_temp_free(t0);
7014 #else
7015 gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
7016 #endif
7017 tcg_gen_addi_tl(addr, addr, 2);
7018 gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
7019 }
7020
7021 static always_inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7022 {
7023 #if defined(TARGET_PPC64)
7024 TCGv t0 = tcg_temp_new();
7025 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7026 gen_qemu_st32(t0, addr, ctx->mem_idx);
7027 tcg_temp_free(t0);
7028 #else
7029 gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
7030 #endif
7031 }
7032
7033 static always_inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7034 {
7035 gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
7036 }
7037
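/* Common wrapper for the SPE load/store forms above: the Rc bit of the
 * opcode selects between the scaled-immediate and the register-indexed
 * address computation before dispatching to the per-instruction gen_op_
 * routine.
 */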
7038 #define GEN_SPEOP_LDST(name, opc2, sh) \
7039 GEN_HANDLER(gen_##name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE) \
7040 { \
7041 TCGv t0; \
7042 if (unlikely(!ctx->spe_enabled)) { \
7043 GEN_EXCP_NO_AP(ctx); \
7044 return; \
7045 } \
7046 t0 = tcg_temp_new(); \
7047 if (Rc(ctx->opcode)) { \
7048 gen_addr_spe_imm_index(t0, ctx, sh); \
7049 } else { \
7050 gen_addr_reg_index(t0, ctx); \
7051 } \
7052 gen_op_##name(ctx, t0); \
7053 tcg_temp_free(t0); \
7054 }
7055
7056 GEN_SPEOP_LDST(evldd, 0x00, 3);
7057 GEN_SPEOP_LDST(evldw, 0x01, 3);
7058 GEN_SPEOP_LDST(evldh, 0x02, 3);
7059 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7060 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7061 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7062 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7063 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7064 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7065 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7066 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7067
7068 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7069 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7070 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7071 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7072 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7073 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7074 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
7075
7076 /* Multiply and add - TODO */
7077 #if 0
7078 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7079 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7080 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7081 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7082 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7083 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7084 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7085 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7086 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7087 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7088 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7089 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7090
7091 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7092 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7093 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7094 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7095 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7096 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
7097 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7098 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7099 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7100 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7101 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7102 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7103 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
7104 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7105
7106 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7107 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7108 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7109 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7110 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7111 GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
7112
7113 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7114 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7115 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7116 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7117 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7118 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7119 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7120 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7121 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7122 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7123 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7124 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7125
7126 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7127 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7128 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7129 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
7130 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7131
7132 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7133 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7134 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7135 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7136 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7137 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7138 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7139 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7140 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7141 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7142 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7143 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7144
7145 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7146 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7147 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7148 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7149 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7150 #endif
7151
7152 /*** SPE floating-point extension ***/
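/* The macro names encode <result width>_<operand width>. On 64-bit targets a
 * 32-bit result is merged into the low word of the destination GPR while the
 * upper 32 bits are preserved.
 */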
7153 #if defined(TARGET_PPC64)
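/* On 64-bit targets a 32-bit SPE result only replaces the low word of rD;
 * the andi/or sequences below keep the upper 32 bits of rD intact.
 */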
7154 #define GEN_SPEFPUOP_CONV_32_32(name) \
7155 static always_inline void gen_##name (DisasContext *ctx) \
7156 { \
7157 TCGv_i32 t0; \
7158 TCGv t1; \
7159 t0 = tcg_temp_new_i32(); \
7160 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7161 gen_helper_##name(t0, t0); \
7162 t1 = tcg_temp_new(); \
7163 tcg_gen_extu_i32_tl(t1, t0); \
7164 tcg_temp_free_i32(t0); \
7165 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7166 0xFFFFFFFF00000000ULL); \
7167 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7168 tcg_temp_free(t1); \
7169 }
7170 #define GEN_SPEFPUOP_CONV_32_64(name) \
7171 static always_inline void gen_##name (DisasContext *ctx) \
7172 { \
7173 TCGv_i32 t0; \
7174 TCGv t1; \
7175 t0 = tcg_temp_new_i32(); \
7176 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7177 t1 = tcg_temp_new(); \
7178 tcg_gen_extu_i32_tl(t1, t0); \
7179 tcg_temp_free_i32(t0); \
7180 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7181 0xFFFFFFFF00000000ULL); \
7182 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7183 tcg_temp_free(t1); \
7184 }
7185 #define GEN_SPEFPUOP_CONV_64_32(name) \
7186 static always_inline void gen_##name (DisasContext *ctx) \
7187 { \
7188 TCGv_i32 t0 = tcg_temp_new_i32(); \
7189 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7190 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7191 tcg_temp_free_i32(t0); \
7192 }
7193 #define GEN_SPEFPUOP_CONV_64_64(name) \
7194 static always_inline void gen_##name (DisasContext *ctx) \
7195 { \
7196 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7197 }
7198 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7199 static always_inline void gen_##name (DisasContext *ctx) \
7200 { \
7201 TCGv_i32 t0, t1; \
7202 TCGv_i64 t2; \
7203 if (unlikely(!ctx->spe_enabled)) { \
7204 GEN_EXCP_NO_AP(ctx); \
7205 return; \
7206 } \
7207 t0 = tcg_temp_new_i32(); \
7208 t1 = tcg_temp_new_i32(); \
7209 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7210 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7211 gen_helper_##name(t0, t0, t1); \
7212 tcg_temp_free_i32(t1); \
7213 t2 = tcg_temp_new(); \
7214 tcg_gen_extu_i32_tl(t2, t0); \
7215 tcg_temp_free_i32(t0); \
7216 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7217 0xFFFFFFFF00000000ULL); \
7218 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7219 tcg_temp_free(t2); \
7220 }
7221 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7222 static always_inline void gen_##name (DisasContext *ctx) \
7223 { \
7224 if (unlikely(!ctx->spe_enabled)) { \
7225 GEN_EXCP_NO_AP(ctx); \
7226 return; \
7227 } \
7228 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7229 cpu_gpr[rB(ctx->opcode)]); \
7230 }
7231 #define GEN_SPEFPUOP_COMP_32(name) \
7232 static always_inline void gen_##name (DisasContext *ctx) \
7233 { \
7234 TCGv_i32 t0, t1; \
7235 if (unlikely(!ctx->spe_enabled)) { \
7236 GEN_EXCP_NO_AP(ctx); \
7237 return; \
7238 } \
7239 t0 = tcg_temp_new_i32(); \
7240 t1 = tcg_temp_new_i32(); \
7241 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7242 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7243 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7244 tcg_temp_free_i32(t0); \
7245 tcg_temp_free_i32(t1); \
7246 }
7247 #define GEN_SPEFPUOP_COMP_64(name) \
7248 static always_inline void gen_##name (DisasContext *ctx) \
7249 { \
7250 if (unlikely(!ctx->spe_enabled)) { \
7251 GEN_EXCP_NO_AP(ctx); \
7252 return; \
7253 } \
7254 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7255 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7256 }
7257 #else
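/* On 32-bit targets a 64-bit SPE value is split across cpu_gpr (low word)
 * and cpu_gprh (high word); gen_load_gpr64/gen_store_gpr64 move it to and
 * from the 64-bit temporaries passed to the helpers.
 */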
7258 #define GEN_SPEFPUOP_CONV_32_32(name) \
7259 static always_inline void gen_##name (DisasContext *ctx) \
7260 { \
7261 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7262 }
7263 #define GEN_SPEFPUOP_CONV_32_64(name) \
7264 static always_inline void gen_##name (DisasContext *ctx) \
7265 { \
7266 TCGv_i64 t0 = tcg_temp_new_i64(); \
7267 gen_load_gpr64(t0, rB(ctx->opcode)); \
7268 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7269 tcg_temp_free_i64(t0); \
7270 }
7271 #define GEN_SPEFPUOP_CONV_64_32(name) \
7272 static always_inline void gen_##name (DisasContext *ctx) \
7273 { \
7274 TCGv_i64 t0 = tcg_temp_new_i64(); \
7275 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7276 gen_store_gpr64(rD(ctx->opcode), t0); \
7277 tcg_temp_free_i64(t0); \
7278 }
7279 #define GEN_SPEFPUOP_CONV_64_64(name) \
7280 static always_inline void gen_##name (DisasContext *ctx) \
7281 { \
7282 TCGv_i64 t0 = tcg_temp_new_i64(); \
7283 gen_load_gpr64(t0, rB(ctx->opcode)); \
7284 gen_helper_##name(t0, t0); \
7285 gen_store_gpr64(rD(ctx->opcode), t0); \
7286 tcg_temp_free_i64(t0); \
7287 }
7288 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7289 static always_inline void gen_##name (DisasContext *ctx) \
7290 { \
7291 if (unlikely(!ctx->spe_enabled)) { \
7292 GEN_EXCP_NO_AP(ctx); \
7293 return; \
7294 } \
7295 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
7296 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7297 }
7298 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7299 static always_inline void gen_##name (DisasContext *ctx) \
7300 { \
7301 TCGv_i64 t0, t1; \
7302 if (unlikely(!ctx->spe_enabled)) { \
7303 GEN_EXCP_NO_AP(ctx); \
7304 return; \
7305 } \
7306 t0 = tcg_temp_new_i64(); \
7307 t1 = tcg_temp_new_i64(); \
7308 gen_load_gpr64(t0, rA(ctx->opcode)); \
7309 gen_load_gpr64(t1, rB(ctx->opcode)); \
7310 gen_helper_##name(t0, t0, t1); \
7311 gen_store_gpr64(rD(ctx->opcode), t0); \
7312 tcg_temp_free_i64(t0); \
7313 tcg_temp_free_i64(t1); \
7314 }
7315 #define GEN_SPEFPUOP_COMP_32(name) \
7316 static always_inline void gen_##name (DisasContext *ctx) \
7317 { \
7318 if (unlikely(!ctx->spe_enabled)) { \
7319 GEN_EXCP_NO_AP(ctx); \
7320 return; \
7321 } \
7322 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7323 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7324 }
7325 #define GEN_SPEFPUOP_COMP_64(name) \
7326 static always_inline void gen_##name (DisasContext *ctx) \
7327 { \
7328 TCGv_i64 t0, t1; \
7329 if (unlikely(!ctx->spe_enabled)) { \
7330 GEN_EXCP_NO_AP(ctx); \
7331 return; \
7332 } \
7333 t0 = tcg_temp_new_i64(); \
7334 t1 = tcg_temp_new_i64(); \
7335 gen_load_gpr64(t0, rA(ctx->opcode)); \
7336 gen_load_gpr64(t1, rB(ctx->opcode)); \
7337 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7338 tcg_temp_free_i64(t0); \
7339 tcg_temp_free_i64(t1); \
7340 }
7341 #endif
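/* For reference, a sketch of what GEN_SPEFPUOP_CONV_64_64(evfscfui) expands
 * to on a 32-bit target (illustration only, not compiled in):
 *
 *   static always_inline void gen_evfscfui (DisasContext *ctx)
 *   {
 *       TCGv_i64 t0 = tcg_temp_new_i64();
 *       gen_load_gpr64(t0, rB(ctx->opcode));
 *       gen_helper_evfscfui(t0, t0);
 *       gen_store_gpr64(rD(ctx->opcode), t0);
 *       tcg_temp_free_i64(t0);
 *   }
 */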
7342
7343 /* Single-precision floating-point vector operations */
7344 /* Arithmetic */
7345 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
7346 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
7347 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
7348 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
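/* abs/nabs/neg are open-coded below: they only clear, set or flip the sign
 * bit of each 32-bit element, so no helper call is needed.
 */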
7349 static always_inline void gen_evfsabs (DisasContext *ctx)
7350 {
7351 if (unlikely(!ctx->spe_enabled)) {
7352 GEN_EXCP_NO_AP(ctx);
7353 return;
7354 }
7355 #if defined(TARGET_PPC64)
7356 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
7357 #else
7358 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
7359 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7360 #endif
7361 }
7362 static always_inline void gen_evfsnabs (DisasContext *ctx)
7363 {
7364 if (unlikely(!ctx->spe_enabled)) {
7365 GEN_EXCP_NO_AP(ctx);
7366 return;
7367 }
7368 #if defined(TARGET_PPC64)
7369 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7370 #else
7371 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7372 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7373 #endif
7374 }
7375 static always_inline void gen_evfsneg (DisasContext *ctx)
7376 {
7377 if (unlikely(!ctx->spe_enabled)) {
7378 GEN_EXCP_NO_AP(ctx);
7379 return;
7380 }
7381 #if defined(TARGET_PPC64)
7382 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7383 #else
7384 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7385 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7386 #endif
7387 }
7388
7389 /* Conversion */
7390 GEN_SPEFPUOP_CONV_64_64(evfscfui);
7391 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
7392 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
7393 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
7394 GEN_SPEFPUOP_CONV_64_64(evfsctui);
7395 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
7396 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
7397 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
7398 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
7399 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
7400
7401 /* Comparison */
7402 GEN_SPEFPUOP_COMP_64(evfscmpgt);
7403 GEN_SPEFPUOP_COMP_64(evfscmplt);
7404 GEN_SPEFPUOP_COMP_64(evfscmpeq);
7405 GEN_SPEFPUOP_COMP_64(evfststgt);
7406 GEN_SPEFPUOP_COMP_64(evfststlt);
7407 GEN_SPEFPUOP_COMP_64(evfststeq);
7408
7409 /* Opcode definitions */
7410 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
7411 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
7412 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
7413 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
7414 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
7415 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
7416 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
7417 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
7418 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
7419 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
7420 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
7421 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
7422 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
7423 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
7424
7425 /* Single precision floating-point operations */
7426 /* Arithmetic */
7427 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
7428 GEN_SPEFPUOP_ARITH2_32_32(efssub);
7429 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
7430 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
7431 static always_inline void gen_efsabs (DisasContext *ctx)
7432 {
7433 if (unlikely(!ctx->spe_enabled)) {
7434 GEN_EXCP_NO_AP(ctx);
7435 return;
7436 }
7437 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
7438 }
7439 static always_inline void gen_efsnabs (DisasContext *ctx)
7440 {
7441 if (unlikely(!ctx->spe_enabled)) {
7442 GEN_EXCP_NO_AP(ctx);
7443 return;
7444 }
7445 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7446 }
7447 static always_inline void gen_efsneg (DisasContext *ctx)
7448 {
7449 if (unlikely(!ctx->spe_enabled)) {
7450 GEN_EXCP_NO_AP(ctx);
7451 return;
7452 }
7453 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7454 }
7455
7456 /* Conversion */
7457 GEN_SPEFPUOP_CONV_32_32(efscfui);
7458 GEN_SPEFPUOP_CONV_32_32(efscfsi);
7459 GEN_SPEFPUOP_CONV_32_32(efscfuf);
7460 GEN_SPEFPUOP_CONV_32_32(efscfsf);
7461 GEN_SPEFPUOP_CONV_32_32(efsctui);
7462 GEN_SPEFPUOP_CONV_32_32(efsctsi);
7463 GEN_SPEFPUOP_CONV_32_32(efsctuf);
7464 GEN_SPEFPUOP_CONV_32_32(efsctsf);
7465 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
7466 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
7467 GEN_SPEFPUOP_CONV_32_64(efscfd);
7468
7469 /* Comparison */
7470 GEN_SPEFPUOP_COMP_32(efscmpgt);
7471 GEN_SPEFPUOP_COMP_32(efscmplt);
7472 GEN_SPEFPUOP_COMP_32(efscmpeq);
7473 GEN_SPEFPUOP_COMP_32(efststgt);
7474 GEN_SPEFPUOP_COMP_32(efststlt);
7475 GEN_SPEFPUOP_COMP_32(efststeq);
7476
7477 /* Opcode definitions */
7478 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
7479 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
7480 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
7481 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
7482 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
7483 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
7484 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
7485 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
7486 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
7487 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
7488 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
7489 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
7490 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
7491 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
7492
7493 /* Double precision floating-point operations */
7494 /* Arithmetic */
7495 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
7496 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
7497 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
7498 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
7499 static always_inline void gen_efdabs (DisasContext *ctx)
7500 {
7501 if (unlikely(!ctx->spe_enabled)) {
7502 GEN_EXCP_NO_AP(ctx);
7503 return;
7504 }
7505 #if defined(TARGET_PPC64)
7506 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
7507 #else
7508 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); /* copy unchanged low word */
tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7509 #endif
7510 }
7511 static always_inline void gen_efdnabs (DisasContext *ctx)
7512 {
7513 if (unlikely(!ctx->spe_enabled)) {
7514 GEN_EXCP_NO_AP(ctx);
7515 return;
7516 }
7517 #if defined(TARGET_PPC64)
7518 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
7519 #else
7520 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); /* copy unchanged low word */
tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7521 #endif
7522 }
7523 static always_inline void gen_efdneg (DisasContext *ctx)
7524 {
7525 if (unlikely(!ctx->spe_enabled)) {
7526 GEN_EXCP_NO_AP(ctx);
7527 return;
7528 }
7529 #if defined(TARGET_PPC64)
7530 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
7531 #else
7532 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); /* copy unchanged low word */
tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7533 #endif
7534 }
7535
7536 /* Conversion */
7537 GEN_SPEFPUOP_CONV_64_32(efdcfui);
7538 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
7539 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
7540 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
7541 GEN_SPEFPUOP_CONV_32_64(efdctui);
7542 GEN_SPEFPUOP_CONV_32_64(efdctsi);
7543 GEN_SPEFPUOP_CONV_32_64(efdctuf);
7544 GEN_SPEFPUOP_CONV_32_64(efdctsf);
7545 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
7546 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
7547 GEN_SPEFPUOP_CONV_64_32(efdcfs);
7548 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
7549 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
7550 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
7551 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
7552
7553 /* Comparison */
7554 GEN_SPEFPUOP_COMP_64(efdcmpgt);
7555 GEN_SPEFPUOP_COMP_64(efdcmplt);
7556 GEN_SPEFPUOP_COMP_64(efdcmpeq);
7557 GEN_SPEFPUOP_COMP_64(efdtstgt);
7558 GEN_SPEFPUOP_COMP_64(efdtstlt);
7559 GEN_SPEFPUOP_COMP_64(efdtsteq);
7560
7561 /* Opcode definitions */
7562 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
7563 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
7564 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
7565 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
7566 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
7567 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
7568 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
7569 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
7570 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
7571 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
7572 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
7573 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
7574 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
7575 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
7576 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
7577 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
7578
7579 /* End opcode list */
7580 GEN_OPCODE_MARK(end);
7581
7582 #include "translate_init.c"
7583 #include "helper_regs.h"
7584
7585 /*****************************************************************************/
7586 /* Misc PowerPC helpers */
7587 void cpu_dump_state (CPUState *env, FILE *f,
7588 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7589 int flags)
7590 {
7591 #define RGPL 4
7592 #define RFPL 4
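/* RGPL/RFPL: number of GPR/FPR values printed per line of output */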
7593
7594 int i;
7595
7596 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
7597 env->nip, env->lr, env->ctr, env->xer);
7598 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
7599 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
7600 #if !defined(NO_TIMER_DUMP)
7601 cpu_fprintf(f, "TB %08x %08x "
7602 #if !defined(CONFIG_USER_ONLY)
7603 "DECR %08x"
7604 #endif
7605 "\n",
7606 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
7607 #if !defined(CONFIG_USER_ONLY)
7608 , cpu_ppc_load_decr(env)
7609 #endif
7610 );
7611 #endif
7612 for (i = 0; i < 32; i++) {
7613 if ((i & (RGPL - 1)) == 0)
7614 cpu_fprintf(f, "GPR%02d", i);
7615 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
7616 if ((i & (RGPL - 1)) == (RGPL - 1))
7617 cpu_fprintf(f, "\n");
7618 }
7619 cpu_fprintf(f, "CR ");
7620 for (i = 0; i < 8; i++)
7621 cpu_fprintf(f, "%01x", env->crf[i]);
7622 cpu_fprintf(f, " [");
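/* Decode each CR field into its LT/GT/EQ/SO ('L'/'G'/'E'/'O') flags */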
7623 for (i = 0; i < 8; i++) {
7624 char a = '-';
7625 if (env->crf[i] & 0x08)
7626 a = 'L';
7627 else if (env->crf[i] & 0x04)
7628 a = 'G';
7629 else if (env->crf[i] & 0x02)
7630 a = 'E';
7631 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
7632 }
7633 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
7634 for (i = 0; i < 32; i++) {
7635 if ((i & (RFPL - 1)) == 0)
7636 cpu_fprintf(f, "FPR%02d", i);
7637 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
7638 if ((i & (RFPL - 1)) == (RFPL - 1))
7639 cpu_fprintf(f, "\n");
7640 }
7641 #if !defined(CONFIG_USER_ONLY)
7642 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
7643 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
7644 #endif
7645
7646 #undef RGPL
7647 #undef RFPL
7648 }
7649
7650 void cpu_dump_statistics (CPUState *env, FILE*f,
7651 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7652 int flags)
7653 {
7654 #if defined(DO_PPC_STATISTICS)
7655 opc_handler_t **t1, **t2, **t3, *handler;
7656 int op1, op2, op3;
7657
7658 t1 = env->opcodes;
7659 for (op1 = 0; op1 < 64; op1++) {
7660 handler = t1[op1];
7661 if (is_indirect_opcode(handler)) {
7662 t2 = ind_table(handler);
7663 for (op2 = 0; op2 < 32; op2++) {
7664 handler = t2[op2];
7665 if (is_indirect_opcode(handler)) {
7666 t3 = ind_table(handler);
7667 for (op3 = 0; op3 < 32; op3++) {
7668 handler = t3[op3];
7669 if (handler->count == 0)
7670 continue;
7671 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
7672 "%016llx %lld\n",
7673 op1, op2, op3, op1, (op3 << 5) | op2,
7674 handler->oname,
7675 handler->count, handler->count);
7676 }
7677 } else {
7678 if (handler->count == 0)
7679 continue;
7680 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
7681 "%016llx %lld\n",
7682 op1, op2, op1, op2, handler->oname,
7683 handler->count, handler->count);
7684 }
7685 }
7686 } else {
7687 if (handler->count == 0)
7688 continue;
7689 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
7690 op1, op1, handler->oname,
7691 handler->count, handler->count);
7692 }
7693 }
7694 #endif
7695 }
7696
7697 /*****************************************************************************/
7698 static always_inline void gen_intermediate_code_internal (CPUState *env,
7699 TranslationBlock *tb,
7700 int search_pc)
7701 {
7702 DisasContext ctx, *ctxp = &ctx;
7703 opc_handler_t **table, *handler;
7704 target_ulong pc_start;
7705 uint16_t *gen_opc_end;
7706 int supervisor, little_endian;
7707 CPUBreakpoint *bp;
7708 int j, lj = -1;
7709 int num_insns;
7710 int max_insns;
7711
7712 pc_start = tb->pc;
7713 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7714 #if defined(OPTIMIZE_FPRF_UPDATE)
7715 gen_fprf_ptr = gen_fprf_buf;
7716 #endif
7717 ctx.nip = pc_start;
7718 ctx.tb = tb;
7719 ctx.exception = POWERPC_EXCP_NONE;
7720 ctx.spr_cb = env->spr_cb;
7721 supervisor = env->mmu_idx;
7722 #if !defined(CONFIG_USER_ONLY)
7723 ctx.supervisor = supervisor;
7724 #endif
7725 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
7726 #if defined(TARGET_PPC64)
7727 ctx.sf_mode = msr_sf;
7728 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
7729 #else
7730 ctx.mem_idx = (supervisor << 1) | little_endian;
7731 #endif
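/* mem_idx selects the MMU context used by loads/stores: it packs the
 * supervisor level, (on 64-bit targets) the MSR[SF] mode and the
 * little-endian flag computed above.
 */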
7732 ctx.fpu_enabled = msr_fp;
7733 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
7734 ctx.spe_enabled = msr_spe;
7735 else
7736 ctx.spe_enabled = 0;
7737 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
7738 ctx.altivec_enabled = msr_vr;
7739 else
7740 ctx.altivec_enabled = 0;
7741 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
7742 ctx.singlestep_enabled = CPU_SINGLE_STEP;
7743 else
7744 ctx.singlestep_enabled = 0;
7745 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
7746 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
7747 if (unlikely(env->singlestep_enabled))
7748 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
7749 #if defined (DO_SINGLE_STEP) && 0
7750 /* Single step trace mode */
7751 msr_se = 1;
7752 #endif
7753 num_insns = 0;
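/* max_insns bounds the number of guest instructions per TB when icount is
 * in use; CF_COUNT_MASK means effectively unlimited.
 */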
7754 max_insns = tb->cflags & CF_COUNT_MASK;
7755 if (max_insns == 0)
7756 max_insns = CF_COUNT_MASK;
7757
7758 gen_icount_start();
7759 /* Set env in case of segfault during code fetch */
7760 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
7761 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
7762 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
7763 if (bp->pc == ctx.nip) {
7764 gen_update_nip(&ctx, ctx.nip);
7765 gen_helper_raise_debug();
7766 break;
7767 }
7768 }
7769 }
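/* When translating for gen_intermediate_code_pc(), record the guest NIP
 * and instruction count of each generated op so the PC can be recovered
 * after a fault inside this TB.
 */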
7770 if (unlikely(search_pc)) {
7771 j = gen_opc_ptr - gen_opc_buf;
7772 if (lj < j) {
7773 lj++;
7774 while (lj < j)
7775 gen_opc_instr_start[lj++] = 0;
7776 gen_opc_pc[lj] = ctx.nip;
7777 gen_opc_instr_start[lj] = 1;
7778 gen_opc_icount[lj] = num_insns;
7779 }
7780 }
7781 #if defined PPC_DEBUG_DISAS
7782 if (loglevel & CPU_LOG_TB_IN_ASM) {
7783 fprintf(logfile, "----------------\n");
7784 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
7785 ctx.nip, supervisor, (int)msr_ir);
7786 }
7787 #endif
7788 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7789 gen_io_start();
7790 if (unlikely(little_endian)) {
7791 ctx.opcode = bswap32(ldl_code(ctx.nip));
7792 } else {
7793 ctx.opcode = ldl_code(ctx.nip);
7794 }
7795 #if defined PPC_DEBUG_DISAS
7796 if (loglevel & CPU_LOG_TB_IN_ASM) {
7797 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
7798 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
7799 opc3(ctx.opcode), little_endian ? "little" : "big");
7800 }
7801 #endif
7802 ctx.nip += 4;
7803 table = env->opcodes;
7804 num_insns++;
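/* Decode through up to three table levels: opc1 indexes the top-level
 * table; indirect entries are further indexed by opc2 and, if still
 * indirect, by opc3.
 */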
7805 handler = table[opc1(ctx.opcode)];
7806 if (is_indirect_opcode(handler)) {
7807 table = ind_table(handler);
7808 handler = table[opc2(ctx.opcode)];
7809 if (is_indirect_opcode(handler)) {
7810 table = ind_table(handler);
7811 handler = table[opc3(ctx.opcode)];
7812 }
7813 }
7814 /* Is opcode *REALLY* valid? */
7815 if (unlikely(handler->handler == &gen_invalid)) {
7816 if (loglevel != 0) {
7817 fprintf(logfile, "invalid/unsupported opcode: "
7818 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
7819 opc1(ctx.opcode), opc2(ctx.opcode),
7820 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
7821 } else {
7822 printf("invalid/unsupported opcode: "
7823 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
7824 opc1(ctx.opcode), opc2(ctx.opcode),
7825 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
7826 }
7827 } else {
7828 if (unlikely((ctx.opcode & handler->inval) != 0)) {
7829 if (loglevel != 0) {
7830 fprintf(logfile, "invalid bits: %08x for opcode: "
7831 "%02x - %02x - %02x (%08x) " ADDRX "\n",
7832 ctx.opcode & handler->inval, opc1(ctx.opcode),
7833 opc2(ctx.opcode), opc3(ctx.opcode),
7834 ctx.opcode, ctx.nip - 4);
7835 } else {
7836 printf("invalid bits: %08x for opcode: "
7837 "%02x - %02x - %02x (%08x) " ADDRX "\n",
7838 ctx.opcode & handler->inval, opc1(ctx.opcode),
7839 opc2(ctx.opcode), opc3(ctx.opcode),
7840 ctx.opcode, ctx.nip - 4);
7841 }
7842 GEN_EXCP_INVAL(ctxp);
7843 break;
7844 }
7845 }
7846 (*(handler->handler))(&ctx);
7847 #if defined(DO_PPC_STATISTICS)
7848 handler->count++;
7849 #endif
7850 /* Check trace mode exceptions */
7851 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
7852 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
7853 ctx.exception != POWERPC_SYSCALL &&
7854 ctx.exception != POWERPC_EXCP_TRAP &&
7855 ctx.exception != POWERPC_EXCP_BRANCH)) {
7856 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
7857 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
7858 (env->singlestep_enabled) ||
7859 num_insns >= max_insns)) {
7860 /* if we reach a page boundary or are single stepping, stop
7861 * generation
7862 */
7863 break;
7864 }
7865 #if defined (DO_SINGLE_STEP)
7866 break;
7867 #endif
7868 }
7869 if (tb->cflags & CF_LAST_IO)
7870 gen_io_end();
7871 if (ctx.exception == POWERPC_EXCP_NONE) {
7872 gen_goto_tb(&ctx, 0, ctx.nip);
7873 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
7874 if (unlikely(env->singlestep_enabled)) {
7875 gen_update_nip(&ctx, ctx.nip);
7876 gen_helper_raise_debug();
7877 }
7878 /* Generate the return instruction */
7879 tcg_gen_exit_tb(0);
7880 }
7881 gen_icount_end(tb, num_insns);
7882 *gen_opc_ptr = INDEX_op_end;
7883 if (unlikely(search_pc)) {
7884 j = gen_opc_ptr - gen_opc_buf;
7885 lj++;
7886 while (lj <= j)
7887 gen_opc_instr_start[lj++] = 0;
7888 } else {
7889 tb->size = ctx.nip - pc_start;
7890 tb->icount = num_insns;
7891 }
7892 #if defined(DEBUG_DISAS)
7893 if (loglevel & CPU_LOG_TB_CPU) {
7894 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
7895 cpu_dump_state(env, logfile, fprintf, 0);
7896 }
7897 if (loglevel & CPU_LOG_TB_IN_ASM) {
7898 int flags;
7899 flags = env->bfd_mach;
7900 flags |= little_endian << 16;
7901 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7902 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
7903 fprintf(logfile, "\n");
7904 }
7905 #endif
7906 }
7907
7908 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
7909 {
7910 gen_intermediate_code_internal(env, tb, 0);
7911 }
7912
7913 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
7914 {
7915 gen_intermediate_code_internal(env, tb, 1);
7916 }
7917
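/* Restore the guest NIP for the op at pc_pos, using the position table
 * recorded by gen_intermediate_code_pc(); used when unwinding a
 * translation block after a fault.
 */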
7918 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7919 unsigned long searched_pc, int pc_pos, void *puc)
7920 {
7921 env->nip = gen_opc_pc[pc_pos];
7922 }