/*** VSX extension ***/

static inline void get_vsr(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
}

static inline void set_vsr(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
}
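
/*
 * VSRs 0-31 overlap the FPRs: the FPR holds the high doubleword and
 * env->vsr[n] (accessed above) holds the low doubleword. VSRs 32-63
 * map onto the Altivec registers, so the accessors below dispatch on
 * the register number.
 */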
static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    if (n < 32) {
        get_fpr(dst, n);
    } else {
        get_avr64(dst, n - 32, true);
    }
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    if (n < 32) {
        get_vsr(dst, n);
    } else {
        get_avr64(dst, n - 32, false);
    }
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    if (n < 32) {
        set_fpr(n, src);
    } else {
        set_avr64(n - 32, src, true);
    }
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    if (n < 32) {
        set_vsr(n, src);
    } else {
        set_avr64(n - 32, src, false);
    }
}
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}
VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    /* write the loaded words back to the target VSR */
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
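
/*
 * Byte-swap each of the eight halfwords in the 128-bit value inh:inl:
 * keep the even bytes with the mask, shift them up by 8, and OR in
 * the odd bytes shifted down by 8.
 */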
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    /* write the loaded halfwords back to the target VSR */
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    /* write the loaded bytes back to the target VSR */
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
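
/*
 * lxv/lxvx and stxv/stxvx move a full 128-bit VSR. In little-endian
 * mode the low doubleword is transferred first with MO_LEQ accesses;
 * in big-endian mode the high doubleword comes first with MO_BEQ.
 */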
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    get_cpu_vsrh(xth, xt);                                  \
    get_cpu_vsrl(xtl, xt);                                  \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrh(xt, xth);                              \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrh(xt, xth);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}
VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv xt;                                                       \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                            \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free(xt);                                             \
}
VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                      \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}
VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    /* read the source VSR, then store it to memory */        \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}
VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}
VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
#endif
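
/*
 * xxpermdi: bit 1 of DM selects which doubleword of xA goes to the
 * high half of xT, bit 0 which doubleword of xB goes to the low half.
 * When xT overlaps a source, both doublewords are read before any
 * write-back.
 */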
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
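
/*
 * abs, nabs, neg and copy-sign are pure sign-bit operations: clear
 * the sign with andc, force it with or, flip it with xor, or merge in
 * the sign of xA, using the masks above.
 */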
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsrh(xb, xB(ctx->opcode));                        \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
        case OP_ABS: {                                            \
            tcg_gen_andc_i64(xb, xb, sgm);                        \
            break;                                                \
        }                                                         \
        case OP_NABS: {                                           \
            tcg_gen_or_i64(xb, xb, sgm);                          \
            break;                                                \
        }                                                         \
        case OP_NEG: {                                            \
            tcg_gen_xor_i64(xb, xb, sgm);                         \
            break;                                                \
        }                                                         \
        case OP_CPSGN: {                                          \
            TCGv_i64 xa = tcg_temp_new_i64();                     \
            get_cpu_vsrh(xa, xA(ctx->opcode));                    \
            tcg_gen_and_i64(xa, xa, sgm);                         \
            tcg_gen_andc_i64(xb, xb, sgm);                        \
            tcg_gen_or_i64(xb, xb, xa);                           \
            tcg_temp_free_i64(xa);                                \
            break;                                                \
        }                                                         \
        }                                                         \
        set_cpu_vsrh(xT(ctx->opcode), xb);                        \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }
VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xb);                                        \
    get_cpu_vsrl(xbl, xb);                                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsrh(tmp, xa);                                    \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsrh(xt, xbh);                                        \
    set_cpu_vsrl(xt, xbl);                                        \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}
VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xbh, xbl, sgm;                                   \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xbh = tcg_temp_new_i64();                                 \
        xbl = tcg_temp_new_i64();                                 \
        sgm = tcg_temp_new_i64();                                 \
        /* read the source operand from xB */                     \
        get_cpu_vsrh(xbh, xB(ctx->opcode));                       \
        get_cpu_vsrl(xbl, xB(ctx->opcode));                       \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
        case OP_ABS: {                                            \
            tcg_gen_andc_i64(xbh, xbh, sgm);                      \
            tcg_gen_andc_i64(xbl, xbl, sgm);                      \
            break;                                                \
        }                                                         \
        case OP_NABS: {                                           \
            tcg_gen_or_i64(xbh, xbh, sgm);                        \
            tcg_gen_or_i64(xbl, xbl, sgm);                        \
            break;                                                \
        }                                                         \
        case OP_NEG: {                                            \
            tcg_gen_xor_i64(xbh, xbh, sgm);                       \
            tcg_gen_xor_i64(xbl, xbl, sgm);                       \
            break;                                                \
        }                                                         \
        case OP_CPSGN: {                                          \
            TCGv_i64 xah = tcg_temp_new_i64();                    \
            TCGv_i64 xal = tcg_temp_new_i64();                    \
            get_cpu_vsrh(xah, xA(ctx->opcode));                   \
            get_cpu_vsrl(xal, xA(ctx->opcode));                   \
            tcg_gen_and_i64(xah, xah, sgm);                       \
            tcg_gen_and_i64(xal, xal, sgm);                       \
            tcg_gen_andc_i64(xbh, xbh, sgm);                      \
            tcg_gen_andc_i64(xbl, xbl, sgm);                      \
            tcg_gen_or_i64(xbh, xbh, xah);                        \
            tcg_gen_or_i64(xbl, xbl, xal);                        \
            tcg_temp_free_i64(xah);                               \
            tcg_temp_free_i64(xal);                               \
            break;                                                \
        }                                                         \
        }                                                         \
        set_cpu_vsrh(xT(ctx->opcode), xbh);                       \
        set_cpu_vsrl(xT(ctx->opcode), xbl);                       \
        tcg_temp_free_i64(xbh);                                   \
        tcg_temp_free_i64(xbl);                                   \
        tcg_temp_free_i64(sgm);                                   \
    }
VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
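
/*
 * The remaining arithmetic, compare, convert and round operations are
 * implemented in out-of-line softfloat helpers; the macros below only
 * emit the facility-unavailable check and the helper call. The
 * op1/op2/inval arguments document the opcode table entries and are
 * not referenced by the generated body.
 */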
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    gen_helper_##name(cpu_env, opc);                              \
    tcg_temp_free_i32(opc);                                       \
}
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i64 t0;                                                  \
    TCGv_i64 t1;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    t0 = tcg_temp_new_i64();                                      \
    t1 = tcg_temp_new_i64();                                      \
    get_cpu_vsrh(t0, xB(ctx->opcode));                            \
    gen_helper_##name(t1, cpu_env, t0);                           \
    set_cpu_vsrh(xT(ctx->opcode), t1);                            \
    tcg_temp_free_i64(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#define VSX_LOGICAL(name, tcg_op)                                 \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 t0;                                              \
        TCGv_i64 t1;                                              \
        TCGv_i64 t2;                                              \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        t0 = tcg_temp_new_i64();                                  \
        t1 = tcg_temp_new_i64();                                  \
        t2 = tcg_temp_new_i64();                                  \
        get_cpu_vsrh(t0, xA(ctx->opcode));                        \
        get_cpu_vsrh(t1, xB(ctx->opcode));                        \
        tcg_op(t2, t0, t1);                                       \
        set_cpu_vsrh(xT(ctx->opcode), t2);                        \
        get_cpu_vsrl(t0, xA(ctx->opcode));                        \
        get_cpu_vsrl(t1, xB(ctx->opcode));                        \
        tcg_op(t2, t0, t1);                                       \
        set_cpu_vsrl(xT(ctx->opcode), t2);                        \
        tcg_temp_free_i64(t0);                                    \
        tcg_temp_free_i64(t1);                                    \
        tcg_temp_free_i64(t2);                                    \
    }
VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)
#define VSX_XXMRG(name, high)                                     \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 a0, a1, b0, b1, tmp;                             \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        a0 = tcg_temp_new_i64();                                  \
        a1 = tcg_temp_new_i64();                                  \
        b0 = tcg_temp_new_i64();                                  \
        b1 = tcg_temp_new_i64();                                  \
        tmp = tcg_temp_new_i64();                                 \
        if (high) {                                               \
            get_cpu_vsrh(a0, xA(ctx->opcode));                    \
            get_cpu_vsrh(a1, xA(ctx->opcode));                    \
            get_cpu_vsrh(b0, xB(ctx->opcode));                    \
            get_cpu_vsrh(b1, xB(ctx->opcode));                    \
        } else {                                                  \
            get_cpu_vsrl(a0, xA(ctx->opcode));                    \
            get_cpu_vsrl(a1, xA(ctx->opcode));                    \
            get_cpu_vsrl(b0, xB(ctx->opcode));                    \
            get_cpu_vsrl(b1, xB(ctx->opcode));                    \
        }                                                         \
        tcg_gen_shri_i64(a0, a0, 32);                             \
        tcg_gen_shri_i64(b0, b0, 32);                             \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                 \
        set_cpu_vsrh(xT(ctx->opcode), tmp);                       \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                 \
        set_cpu_vsrl(xT(ctx->opcode), tmp);                       \
        tcg_temp_free_i64(a0);                                    \
        tcg_temp_free_i64(a1);                                    \
        tcg_temp_free_i64(b0);                                    \
        tcg_temp_free_i64(b1);                                    \
        tcg_temp_free_i64(tmp);                                   \
    }
VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
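
/* xxsel: per doubleword, xT = (xA & ~xC) | (xB & xC). */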
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c, tmp;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    get_cpu_vsrh(a, xA(ctx->opcode));
    get_cpu_vsrh(b, xB(ctx->opcode));
    get_cpu_vsrh(c, xC(ctx->opcode));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(tmp, a, b);
    set_cpu_vsrh(xT(ctx->opcode), tmp);

    get_cpu_vsrl(a, xA(ctx->opcode));
    get_cpu_vsrl(b, xB(ctx->opcode));
    get_cpu_vsrl(c, xC(ctx->opcode));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(tmp, a, b);
    set_cpu_vsrl(xT(ctx->opcode), tmp);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
    tcg_temp_free_i64(tmp);
}
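
/*
 * xxspltw: UIM bit 1 selects the source doubleword of xB and bit 0
 * the word within it; that word is replicated into all four words
 * of xT.
 */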
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    vsr = tcg_temp_new_i64();
    if (UIM(ctx->opcode) & 2) {
        get_cpu_vsrl(vsr, xB(ctx->opcode));
    } else {
        get_cpu_vsrh(vsr, xB(ctx->opcode));
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(vsr, b, b2);
    set_cpu_vsrh(xT(ctx->opcode), vsr);
    set_cpu_vsrl(xT(ctx->opcode), vsr);

    tcg_temp_free_i64(vsr);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}
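
/* pattern(x) replicates the low byte of x into all eight bytes. */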
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    TCGv_i64 vsr;

    /* VSRs 0-31 need the VSX facility; VSRs 32-63 are the VRs */
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    vsr = tcg_temp_new_i64();
    tcg_gen_movi_i64(vsr, pattern(uim8));
    set_cpu_vsrh(xT(ctx->opcode), vsr);
    set_cpu_vsrl(xT(ctx->opcode), vsr);
    tcg_temp_free_i64(vsr);
}
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsrh(xth, xA(ctx->opcode));
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrh(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrl(t0, xA(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsrl(xth, xA(ctx->opcode));
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrl(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrl(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv xt, xb;                                                \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                         \
    xb = tcg_const_tl(xB(ctx->opcode));                         \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds; uimm > 12 is handled in the  \
     * helper, as on hardware.                                  \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsrh(xT(ctx->opcode), t1);                      \
        set_cpu_vsrl(xT(ctx->opcode), t1);                      \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free(xb);                                          \
    tcg_temp_free(xt);                                          \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}
VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)
#if defined(TARGET_PPC64)
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}
static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}
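
/*
 * xsxsigdp: extract the 52-bit significand and OR in the implicit
 * leading 1, which exists only when the biased exponent is neither 0
 * (zero/denormal) nor 2047 (infinity/NaN), hence the two movconds.
 */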
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_andi_i64(rt, t1, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(rt, rt, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}
static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x0000FFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif
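
/*
 * xviexp*: build each element of xT from the sign and significand
 * bits of xA and the exponent field supplied in xB.
 */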
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbl, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
1908 #undef GEN_XX3_RC_FORM
1909 #undef GEN_XX3FORM_DM