/*** VSX extension ***/

static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

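/*
 * Each VSR is handled here as two 64-bit halves: get/set_cpu_vsrh access
 * the high doubleword and get/set_cpu_vsrl the low doubleword, using
 * vsr64_offset() to locate the half inside CPUPPCState.
 */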
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

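/*
 * lxvd2x loads the two doublewords of VSR[XT] with two 8-byte accesses;
 * lxvdsx loads a single doubleword and replicates it into both halves.
 */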
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

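/*
 * gen_bswap16x8 and gen_bswap32x4 byte-reverse every 16-bit (resp. 32-bit)
 * element of a 128-bit value held in two 64-bit halves.  gen_bswap16x8 is
 * used by the little-endian paths of lxvh8x/stxvh8x and by xxbrh;
 * gen_bswap32x4 by xxbrw.
 */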
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

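/*
 * lxv/lxvx and stxv/stxvx below cover the whole 64-entry VSR file: a
 * target below 32 requires VSX, while targets 32-63 alias the AltiVec
 * registers and only require AltiVec.  The "indexed" parameter selects
 * register-indexed (x-form) versus DQ-immediate addressing.
 */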
#define VSX_VECTOR_LOAD(name, op, indexed)                    \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrh(xt, xth);                                \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrh(xt, xth);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)

#define VSX_VECTOR_STORE(name, op, indexed)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, xt);                                    \
    get_cpu_vsrl(xtl, xt);                                    \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)

#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                           \
static void gen_##name(DisasContext *ctx)                            \
{                                                                    \
    TCGv EA, xt;                                                     \
    if (xT(ctx->opcode) < 32) {                                      \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
    } else {                                                         \
        if (unlikely(!ctx->altivec_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VPU);                    \
            return;                                                  \
        }                                                            \
    }                                                                \
    EA = tcg_temp_new();                                             \
    xt = tcg_const_tl(xT(ctx->opcode));                              \
    gen_set_access_type(ctx, ACCESS_INT);                            \
    gen_addr_register(ctx, EA);                                      \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);    \
    tcg_temp_free(EA);                                               \
    tcg_temp_free(xt);                                               \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)

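/*
 * The load/store-with-length instructions are handled entirely by
 * out-of-line helpers: translation only computes the base address and
 * passes the target VSR number plus GPR[RB] (which carries the length
 * operand) to the helper.
 */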
#define VSX_LOAD_SCALAR_DS(name, operation)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                  \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh, xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh, xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh, xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
#endif

static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP  0x8000000080000000ull

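/*
 * SGN_MASK_DP selects the sign bit of a 64-bit double; SGN_MASK_SP selects
 * the sign bit of each 32-bit word within a doubleword.  The move macros
 * below use them to implement abs, nabs, neg and copysign as plain bit
 * operations.
 */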
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xb, sgm;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xb = tcg_temp_new_i64();                                  \
    sgm = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xb, xB(ctx->opcode));                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xb, xb, sgm);                          \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xb, xb, sgm);                         \
        break;                                                \
    case OP_CPSGN: {                                          \
        TCGv_i64 xa = tcg_temp_new_i64();                     \
        get_cpu_vsrh(xa, xA(ctx->opcode));                    \
        tcg_gen_and_i64(xa, xa, sgm);                         \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        tcg_gen_or_i64(xb, xb, xa);                           \
        tcg_temp_free_i64(xa);                                \
        break;                                                \
    }                                                         \
    }                                                         \
    set_cpu_vsrh(xT(ctx->opcode), xb);                        \
    tcg_temp_free_i64(xb);                                    \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    int xa;                                                   \
    int xt = rD(ctx->opcode) + 32;                            \
    int xb = rB(ctx->opcode) + 32;                            \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                         \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tmp = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xb);                                    \
    get_cpu_vsrl(xbl, xb);                                    \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        break;                                                \
    case OP_CPSGN:                                            \
        xah = tcg_temp_new_i64();                             \
        xa = rA(ctx->opcode) + 32;                            \
        get_cpu_vsrh(tmp, xa);                                \
        tcg_gen_and_i64(xah, tmp, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_temp_free_i64(xah);                               \
        break;                                                \
    }                                                         \
    set_cpu_vsrh(xt, xbh);                                    \
    set_cpu_vsrl(xt, xbl);                                    \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(sgm);                                   \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xbh, xbl, sgm;                                   \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xB(ctx->opcode));                       \
    get_cpu_vsrl(xbl, xB(ctx->opcode));                       \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        tcg_gen_or_i64(xbl, xbl, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        tcg_gen_xor_i64(xbl, xbl, sgm);                       \
        break;                                                \
    case OP_CPSGN: {                                          \
        TCGv_i64 xah = tcg_temp_new_i64();                    \
        TCGv_i64 xal = tcg_temp_new_i64();                    \
        get_cpu_vsrh(xah, xA(ctx->opcode));                   \
        get_cpu_vsrl(xal, xA(ctx->opcode));                   \
        tcg_gen_and_i64(xah, xah, sgm);                       \
        tcg_gen_and_i64(xal, xal, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_gen_or_i64(xbl, xbl, xal);                        \
        tcg_temp_free_i64(xah);                               \
        tcg_temp_free_i64(xal);                               \
        break;                                                \
    }                                                         \
    }                                                         \
    set_cpu_vsrh(xT(ctx->opcode), xbh);                       \
    set_cpu_vsrl(xT(ctx->opcode), xbl);                       \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsrh(t0, xB(ctx->opcode));                        \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsrh(xT(ctx->opcode), t1);                        \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

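/*
 * Most VSX arithmetic is left to out-of-line helpers: GEN_VSX_HELPER_2
 * simply passes the raw opcode to gen_helper_<name>(cpu_env, opc), while
 * GEN_VSX_HELPER_XT_XB_ENV is used for the few helpers that take the
 * source doubleword and return the result value directly.
 */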
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

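/*
 * The xxbr* byte-reverse instructions are implemented inline: xxbrd
 * reverses bytes within each doubleword, xxbrh and xxbrw reuse the bswap
 * helpers above, and xxbrq swaps and byte-reverses the two doublewords as
 * a single 128-bit value.
 */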
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                       \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),            \
           vsr_full_offset(xA(ctx->opcode)),                  \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);         \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

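/*
 * The VSX logical operations act on the full 16-byte register and are
 * expanded with the TCG generic-vector ops, working directly on the
 * register file at vsr_full_offset().
 */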
#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 a0, a1, b0, b1, tmp;                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    a0 = tcg_temp_new_i64();                                  \
    a1 = tcg_temp_new_i64();                                  \
    b0 = tcg_temp_new_i64();                                  \
    b1 = tcg_temp_new_i64();                                  \
    tmp = tcg_temp_new_i64();                                 \
    if (high) {                                               \
        get_cpu_vsrh(a0, xA(ctx->opcode));                    \
        get_cpu_vsrh(a1, xA(ctx->opcode));                    \
        get_cpu_vsrh(b0, xB(ctx->opcode));                    \
        get_cpu_vsrh(b1, xB(ctx->opcode));                    \
    } else {                                                  \
        get_cpu_vsrl(a0, xA(ctx->opcode));                    \
        get_cpu_vsrl(a1, xA(ctx->opcode));                    \
        get_cpu_vsrl(b0, xB(ctx->opcode));                    \
        get_cpu_vsrl(b1, xB(ctx->opcode));                    \
    }                                                         \
    tcg_gen_shri_i64(a0, a0, 32);                             \
    tcg_gen_shri_i64(b0, b0, 32);                             \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                 \
    set_cpu_vsrh(xT(ctx->opcode), tmp);                       \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                 \
    set_cpu_vsrl(xT(ctx->opcode), tmp);                       \
    tcg_temp_free_i64(a0);                                    \
    tcg_temp_free_i64(a1);                                    \
    tcg_temp_free_i64(b0);                                    \
    tcg_temp_free_i64(b1);                                    \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsrh(xth, xA(ctx->opcode));
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrh(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrl(t0, xA(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrl(xtl, xA(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsrl(xth, xA(ctx->opcode));
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsrl(xth, xA(ctx->opcode));
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsrh(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsrh(xtl, xB(ctx->opcode));
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsrl(t0, xB(ctx->opcode));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_EXTRACT_INSERT(name)                              \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv xt, xb;                                              \
    TCGv_i32 t0;                                              \
    TCGv_i64 t1;                                              \
    uint8_t uimm = UIMM4(ctx->opcode);                        \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = tcg_const_tl(xT(ctx->opcode));                       \
    xb = tcg_const_tl(xB(ctx->opcode));                       \
    t0 = tcg_temp_new_i32();                                  \
    t1 = tcg_temp_new_i64();                                  \
    /*                                                        \
     * uimm > 15 out of bound and for                         \
     * uimm > 12 handle as per hardware in helper             \
     */                                                       \
    if (uimm > 15) {                                          \
        tcg_gen_movi_i64(t1, 0);                              \
        set_cpu_vsrh(xT(ctx->opcode), t1);                    \
        set_cpu_vsrl(xT(ctx->opcode), t1);                    \
        return;                                               \
    }                                                         \
    tcg_gen_movi_i32(t0, uimm);                               \
    gen_helper_##name(cpu_env, xt, xb, t0);                   \
    tcg_temp_free(xb);                                        \
    tcg_temp_free(xt);                                        \
    tcg_temp_free_i32(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

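/*
 * xvxsigdp extracts the 52-bit significand of each double and ORs in the
 * implicit leading bit, except when the exponent is 0 (zero or denormal)
 * or 2047 (infinity or NaN), where the leading bit stays clear.
 */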
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM