/*
 * target/ppc/translate/vsx-impl.inc.c — VSX extension translation.
 *
 * NOTE(review): this file was recovered from a garbled web extraction of the
 * QEMU source; missing lines have been reconstructed — verify against upstream.
 */
1 /*** VSX extension ***/
3 static inline TCGv_i64
cpu_vsrh(int n
)
12 static inline TCGv_i64
cpu_vsrl(int n
)
17 return cpu_avrl
[n
-32];
/*
 * Emit a scalar VSX load: perform `operation` into the high doubleword of
 * VSR[XT].  The low doubleword is deliberately left untouched (architecturally
 * undefined after these loads).  Missing decl/return/free lines restored.
 */
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}
37 VSX_LOAD_SCALAR(lxsdx
, ld64_i64
)
38 VSX_LOAD_SCALAR(lxsiwax
, ld32s_i64
)
39 VSX_LOAD_SCALAR(lxsibzx
, ld8u_i64
)
40 VSX_LOAD_SCALAR(lxsihzx
, ld16u_i64
)
41 VSX_LOAD_SCALAR(lxsiwzx
, ld32u_i64
)
42 VSX_LOAD_SCALAR(lxsspx
, ld32fs
)
44 static void gen_lxvd2x(DisasContext
*ctx
)
47 if (unlikely(!ctx
->vsx_enabled
)) {
48 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
51 gen_set_access_type(ctx
, ACCESS_INT
);
53 gen_addr_reg_index(ctx
, EA
);
54 gen_qemu_ld64_i64(ctx
, cpu_vsrh(xT(ctx
->opcode
)), EA
);
55 tcg_gen_addi_tl(EA
, EA
, 8);
56 gen_qemu_ld64_i64(ctx
, cpu_vsrl(xT(ctx
->opcode
)), EA
);
60 static void gen_lxvdsx(DisasContext
*ctx
)
63 if (unlikely(!ctx
->vsx_enabled
)) {
64 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
67 gen_set_access_type(ctx
, ACCESS_INT
);
69 gen_addr_reg_index(ctx
, EA
);
70 gen_qemu_ld64_i64(ctx
, cpu_vsrh(xT(ctx
->opcode
)), EA
);
71 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xT(ctx
->opcode
)));
75 static void gen_lxvw4x(DisasContext
*ctx
)
78 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
79 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
80 if (unlikely(!ctx
->vsx_enabled
)) {
81 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
84 gen_set_access_type(ctx
, ACCESS_INT
);
87 gen_addr_reg_index(ctx
, EA
);
89 TCGv_i64 t0
= tcg_temp_new_i64();
90 TCGv_i64 t1
= tcg_temp_new_i64();
92 tcg_gen_qemu_ld_i64(t0
, EA
, ctx
->mem_idx
, MO_LEQ
);
93 tcg_gen_shri_i64(t1
, t0
, 32);
94 tcg_gen_deposit_i64(xth
, t1
, t0
, 32, 32);
95 tcg_gen_addi_tl(EA
, EA
, 8);
96 tcg_gen_qemu_ld_i64(t0
, EA
, ctx
->mem_idx
, MO_LEQ
);
97 tcg_gen_shri_i64(t1
, t0
, 32);
98 tcg_gen_deposit_i64(xtl
, t1
, t0
, 32, 32);
99 tcg_temp_free_i64(t0
);
100 tcg_temp_free_i64(t1
);
102 tcg_gen_qemu_ld_i64(xth
, EA
, ctx
->mem_idx
, MO_BEQ
);
103 tcg_gen_addi_tl(EA
, EA
, 8);
104 tcg_gen_qemu_ld_i64(xtl
, EA
, ctx
->mem_idx
, MO_BEQ
);
109 static void gen_bswap16x8(TCGv_i64 outh
, TCGv_i64 outl
,
110 TCGv_i64 inh
, TCGv_i64 inl
)
112 TCGv_i64 mask
= tcg_const_i64(0x00FF00FF00FF00FF);
113 TCGv_i64 t0
= tcg_temp_new_i64();
114 TCGv_i64 t1
= tcg_temp_new_i64();
116 /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
117 tcg_gen_and_i64(t0
, inh
, mask
);
118 tcg_gen_shli_i64(t0
, t0
, 8);
119 tcg_gen_shri_i64(t1
, inh
, 8);
120 tcg_gen_and_i64(t1
, t1
, mask
);
121 tcg_gen_or_i64(outh
, t0
, t1
);
123 /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
124 tcg_gen_and_i64(t0
, inl
, mask
);
125 tcg_gen_shli_i64(t0
, t0
, 8);
126 tcg_gen_shri_i64(t1
, inl
, 8);
127 tcg_gen_and_i64(t1
, t1
, mask
);
128 tcg_gen_or_i64(outl
, t0
, t1
);
130 tcg_temp_free_i64(t0
);
131 tcg_temp_free_i64(t1
);
132 tcg_temp_free_i64(mask
);
135 static void gen_bswap32x4(TCGv_i64 outh
, TCGv_i64 outl
,
136 TCGv_i64 inh
, TCGv_i64 inl
)
138 TCGv_i64 hi
= tcg_temp_new_i64();
139 TCGv_i64 lo
= tcg_temp_new_i64();
141 tcg_gen_bswap64_i64(hi
, inh
);
142 tcg_gen_bswap64_i64(lo
, inl
);
143 tcg_gen_shri_i64(outh
, hi
, 32);
144 tcg_gen_deposit_i64(outh
, outh
, hi
, 32, 32);
145 tcg_gen_shri_i64(outl
, lo
, 32);
146 tcg_gen_deposit_i64(outl
, outl
, lo
, 32, 32);
148 tcg_temp_free_i64(hi
);
149 tcg_temp_free_i64(lo
);
151 static void gen_lxvh8x(DisasContext
*ctx
)
154 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
155 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
157 if (unlikely(!ctx
->vsx_enabled
)) {
158 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
161 gen_set_access_type(ctx
, ACCESS_INT
);
164 gen_addr_reg_index(ctx
, EA
);
165 tcg_gen_qemu_ld_i64(xth
, EA
, ctx
->mem_idx
, MO_BEQ
);
166 tcg_gen_addi_tl(EA
, EA
, 8);
167 tcg_gen_qemu_ld_i64(xtl
, EA
, ctx
->mem_idx
, MO_BEQ
);
169 gen_bswap16x8(xth
, xtl
, xth
, xtl
);
174 static void gen_lxvb16x(DisasContext
*ctx
)
177 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
178 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
180 if (unlikely(!ctx
->vsx_enabled
)) {
181 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
184 gen_set_access_type(ctx
, ACCESS_INT
);
186 gen_addr_reg_index(ctx
, EA
);
187 tcg_gen_qemu_ld_i64(xth
, EA
, ctx
->mem_idx
, MO_BEQ
);
188 tcg_gen_addi_tl(EA
, EA
, 8);
189 tcg_gen_qemu_ld_i64(xtl
, EA
, ctx
->mem_idx
, MO_BEQ
);
/*
 * Emit a 16-byte vector load or store (lxv/stxv and their indexed forms).
 * `indexed` selects register-indexed vs DQ-immediate addressing; the target
 * VSR number picks the VSX vs AltiVec facility-enable check.  In LE mode the
 * low doubleword is accessed first so memory image matches the ISA layout.
 */
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth, xtl;                                      \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
    xth = cpu_vsrh(xt);                                     \
    xtl = cpu_vsrl(xt);                                     \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
}
238 VSX_VECTOR_LOAD_STORE(lxv
, ld_i64
, 0)
239 VSX_VECTOR_LOAD_STORE(stxv
, st_i64
, 0)
240 VSX_VECTOR_LOAD_STORE(lxvx
, ld_i64
, 1)
241 VSX_VECTOR_LOAD_STORE(stxvx
, st_i64
, 1)
/*
 * Emit a load/store-with-length (lxvl/stxvl and the left-justified variants);
 * the heavy lifting is done by the matching out-of-line helper, which gets
 * the target VSR number and the length from GPR[RB].
 */
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA, xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                            \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free(xt);                                             \
}
269 VSX_VECTOR_LOAD_STORE_LENGTH(lxvl
)
270 VSX_VECTOR_LOAD_STORE_LENGTH(lxvll
)
271 VSX_VECTOR_LOAD_STORE_LENGTH(stxvl
)
272 VSX_VECTOR_LOAD_STORE_LENGTH(stxvll
)
/*
 * DS-form scalar load into VSR[rD+32] (AltiVec-overlaid VSRs, hence the
 * VPU facility check).  Low doubleword is left architecturally undefined.
 */
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}
293 VSX_LOAD_SCALAR_DS(lxsd
, ld64_i64
)
294 VSX_LOAD_SCALAR_DS(lxssp
, ld32fs
)
/* Indexed scalar store: store `operation` from the high doubleword of VSR[XS]. */
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}
311 VSX_STORE_SCALAR(stxsdx
, st64_i64
)
313 VSX_STORE_SCALAR(stxsibx
, st8_i64
)
314 VSX_STORE_SCALAR(stxsihx
, st16_i64
)
315 VSX_STORE_SCALAR(stxsiwx
, st32_i64
)
316 VSX_STORE_SCALAR(stxsspx
, st32fs
)
318 static void gen_stxvd2x(DisasContext
*ctx
)
321 if (unlikely(!ctx
->vsx_enabled
)) {
322 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
325 gen_set_access_type(ctx
, ACCESS_INT
);
327 gen_addr_reg_index(ctx
, EA
);
328 gen_qemu_st64_i64(ctx
, cpu_vsrh(xS(ctx
->opcode
)), EA
);
329 tcg_gen_addi_tl(EA
, EA
, 8);
330 gen_qemu_st64_i64(ctx
, cpu_vsrl(xS(ctx
->opcode
)), EA
);
334 static void gen_stxvw4x(DisasContext
*ctx
)
336 TCGv_i64 xsh
= cpu_vsrh(xS(ctx
->opcode
));
337 TCGv_i64 xsl
= cpu_vsrl(xS(ctx
->opcode
));
339 if (unlikely(!ctx
->vsx_enabled
)) {
340 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
343 gen_set_access_type(ctx
, ACCESS_INT
);
345 gen_addr_reg_index(ctx
, EA
);
347 TCGv_i64 t0
= tcg_temp_new_i64();
348 TCGv_i64 t1
= tcg_temp_new_i64();
350 tcg_gen_shri_i64(t0
, xsh
, 32);
351 tcg_gen_deposit_i64(t1
, t0
, xsh
, 32, 32);
352 tcg_gen_qemu_st_i64(t1
, EA
, ctx
->mem_idx
, MO_LEQ
);
353 tcg_gen_addi_tl(EA
, EA
, 8);
354 tcg_gen_shri_i64(t0
, xsl
, 32);
355 tcg_gen_deposit_i64(t1
, t0
, xsl
, 32, 32);
356 tcg_gen_qemu_st_i64(t1
, EA
, ctx
->mem_idx
, MO_LEQ
);
357 tcg_temp_free_i64(t0
);
358 tcg_temp_free_i64(t1
);
360 tcg_gen_qemu_st_i64(xsh
, EA
, ctx
->mem_idx
, MO_BEQ
);
361 tcg_gen_addi_tl(EA
, EA
, 8);
362 tcg_gen_qemu_st_i64(xsl
, EA
, ctx
->mem_idx
, MO_BEQ
);
367 static void gen_stxvh8x(DisasContext
*ctx
)
369 TCGv_i64 xsh
= cpu_vsrh(xS(ctx
->opcode
));
370 TCGv_i64 xsl
= cpu_vsrl(xS(ctx
->opcode
));
373 if (unlikely(!ctx
->vsx_enabled
)) {
374 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
377 gen_set_access_type(ctx
, ACCESS_INT
);
379 gen_addr_reg_index(ctx
, EA
);
381 TCGv_i64 outh
= tcg_temp_new_i64();
382 TCGv_i64 outl
= tcg_temp_new_i64();
384 gen_bswap16x8(outh
, outl
, xsh
, xsl
);
385 tcg_gen_qemu_st_i64(outh
, EA
, ctx
->mem_idx
, MO_BEQ
);
386 tcg_gen_addi_tl(EA
, EA
, 8);
387 tcg_gen_qemu_st_i64(outl
, EA
, ctx
->mem_idx
, MO_BEQ
);
388 tcg_temp_free_i64(outh
);
389 tcg_temp_free_i64(outl
);
391 tcg_gen_qemu_st_i64(xsh
, EA
, ctx
->mem_idx
, MO_BEQ
);
392 tcg_gen_addi_tl(EA
, EA
, 8);
393 tcg_gen_qemu_st_i64(xsl
, EA
, ctx
->mem_idx
, MO_BEQ
);
398 static void gen_stxvb16x(DisasContext
*ctx
)
400 TCGv_i64 xsh
= cpu_vsrh(xS(ctx
->opcode
));
401 TCGv_i64 xsl
= cpu_vsrl(xS(ctx
->opcode
));
404 if (unlikely(!ctx
->vsx_enabled
)) {
405 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
408 gen_set_access_type(ctx
, ACCESS_INT
);
410 gen_addr_reg_index(ctx
, EA
);
411 tcg_gen_qemu_st_i64(xsh
, EA
, ctx
->mem_idx
, MO_BEQ
);
412 tcg_gen_addi_tl(EA
, EA
, 8);
413 tcg_gen_qemu_st_i64(xsl
, EA
, ctx
->mem_idx
, MO_BEQ
);
/*
 * DS-form scalar store from VSR[rD+32] (AltiVec-overlaid VSRs).
 * Body mirrors VSX_LOAD_SCALAR_DS; only the `operation` direction differs.
 */
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}
435 VSX_LOAD_SCALAR_DS(stxsd
, st64_i64
)
436 VSX_LOAD_SCALAR_DS(stxssp
, st32fs
)
/*
 * Word move between GPR and VSR high doubleword, through two conversion
 * ops (tcgop1 then tcgop2).  VSR number selects FPU vs AltiVec enable check.
 */
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}
459 MV_VSRW(mfvsrwz
, ext32u_i64
, trunc_i64_tl
, cpu_gpr
[rA(ctx
->opcode
)], \
460 cpu_vsrh(xS(ctx
->opcode
)))
461 MV_VSRW(mtvsrwa
, extu_tl_i64
, ext32s_i64
, cpu_vsrh(xT(ctx
->opcode
)), \
462 cpu_gpr
[rA(ctx
->opcode
)])
463 MV_VSRW(mtvsrwz
, extu_tl_i64
, ext32u_i64
, cpu_vsrh(xT(ctx
->opcode
)), \
464 cpu_gpr
[rA(ctx
->opcode
)])
466 #if defined(TARGET_PPC64)
/* Doubleword move between GPR and VSR high doubleword (64-bit targets only). */
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}
484 MV_VSRD(mfvsrd
, cpu_gpr
[rA(ctx
->opcode
)], cpu_vsrh(xS(ctx
->opcode
)))
485 MV_VSRD(mtvsrd
, cpu_vsrh(xT(ctx
->opcode
)), cpu_gpr
[rA(ctx
->opcode
)])
487 static void gen_mfvsrld(DisasContext
*ctx
)
489 if (xS(ctx
->opcode
) < 32) {
490 if (unlikely(!ctx
->vsx_enabled
)) {
491 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
495 if (unlikely(!ctx
->altivec_enabled
)) {
496 gen_exception(ctx
, POWERPC_EXCP_VPU
);
501 tcg_gen_mov_i64(cpu_gpr
[rA(ctx
->opcode
)], cpu_vsrl(xS(ctx
->opcode
)));
504 static void gen_mtvsrdd(DisasContext
*ctx
)
506 if (xT(ctx
->opcode
) < 32) {
507 if (unlikely(!ctx
->vsx_enabled
)) {
508 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
512 if (unlikely(!ctx
->altivec_enabled
)) {
513 gen_exception(ctx
, POWERPC_EXCP_VPU
);
518 if (!rA(ctx
->opcode
)) {
519 tcg_gen_movi_i64(cpu_vsrh(xT(ctx
->opcode
)), 0);
521 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_gpr
[rA(ctx
->opcode
)]);
524 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_gpr
[rB(ctx
->opcode
)]);
527 static void gen_mtvsrws(DisasContext
*ctx
)
529 if (xT(ctx
->opcode
) < 32) {
530 if (unlikely(!ctx
->vsx_enabled
)) {
531 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
535 if (unlikely(!ctx
->altivec_enabled
)) {
536 gen_exception(ctx
, POWERPC_EXCP_VPU
);
541 tcg_gen_deposit_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_gpr
[rA(ctx
->opcode
)],
542 cpu_gpr
[rA(ctx
->opcode
)], 32, 32);
543 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_vsrl(xT(ctx
->opcode
)));
548 static void gen_xxpermdi(DisasContext
*ctx
)
550 if (unlikely(!ctx
->vsx_enabled
)) {
551 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
555 if (unlikely((xT(ctx
->opcode
) == xA(ctx
->opcode
)) ||
556 (xT(ctx
->opcode
) == xB(ctx
->opcode
)))) {
559 xh
= tcg_temp_new_i64();
560 xl
= tcg_temp_new_i64();
562 if ((DM(ctx
->opcode
) & 2) == 0) {
563 tcg_gen_mov_i64(xh
, cpu_vsrh(xA(ctx
->opcode
)));
565 tcg_gen_mov_i64(xh
, cpu_vsrl(xA(ctx
->opcode
)));
567 if ((DM(ctx
->opcode
) & 1) == 0) {
568 tcg_gen_mov_i64(xl
, cpu_vsrh(xB(ctx
->opcode
)));
570 tcg_gen_mov_i64(xl
, cpu_vsrl(xB(ctx
->opcode
)));
573 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), xh
);
574 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), xl
);
576 tcg_temp_free_i64(xh
);
577 tcg_temp_free_i64(xl
);
579 if ((DM(ctx
->opcode
) & 2) == 0) {
580 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_vsrh(xA(ctx
->opcode
)));
582 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_vsrl(xA(ctx
->opcode
)));
584 if ((DM(ctx
->opcode
) & 1) == 0) {
585 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xB(ctx
->opcode
)));
587 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrl(xB(ctx
->opcode
)));
/* Sign-bit masks: one DP sign bit, or one per SP word. */
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP  0x8000000080000000ull

/*
 * Scalar sign-manipulation ops (abs/nabs/neg/cpsgn) on the high doubleword
 * of VSR[XB], result to VSR[XT].  `op` is a compile-time constant, so the
 * switch folds to a single arm per instantiation.
 */
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }
639 VSX_SCALAR_MOVE(xsabsdp
, OP_ABS
, SGN_MASK_DP
)
640 VSX_SCALAR_MOVE(xsnabsdp
, OP_NABS
, SGN_MASK_DP
)
641 VSX_SCALAR_MOVE(xsnegdp
, OP_NEG
, SGN_MASK_DP
)
642 VSX_SCALAR_MOVE(xscpsgndp
, OP_CPSGN
, SGN_MASK_DP
)
/*
 * Quad-precision sign-manipulation ops; operands live in VSRs 32..63
 * (register fields + 32).  Only the high doubleword carries the sign; the
 * low doubleword is copied through unchanged.
 */
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm;                                  \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb));                           \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm);                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh);                           \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl);                           \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
}
688 VSX_SCALAR_MOVE_QP(xsabsqp
, OP_ABS
, SGN_MASK_DP
)
689 VSX_SCALAR_MOVE_QP(xsnabsqp
, OP_NABS
, SGN_MASK_DP
)
690 VSX_SCALAR_MOVE_QP(xsnegqp
, OP_NEG
, SGN_MASK_DP
)
691 VSX_SCALAR_MOVE_QP(xscpsgnqp
, OP_CPSGN
, SGN_MASK_DP
)
/*
 * Vector sign-manipulation ops applied to both doublewords of VSR[XB]
 * (sgn_mask selects DP or per-word SP sign bits), result to VSR[XT].
 */
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }
746 VSX_VECTOR_MOVE(xvabsdp
, OP_ABS
, SGN_MASK_DP
)
747 VSX_VECTOR_MOVE(xvnabsdp
, OP_NABS
, SGN_MASK_DP
)
748 VSX_VECTOR_MOVE(xvnegdp
, OP_NEG
, SGN_MASK_DP
)
749 VSX_VECTOR_MOVE(xvcpsgndp
, OP_CPSGN
, SGN_MASK_DP
)
750 VSX_VECTOR_MOVE(xvabssp
, OP_ABS
, SGN_MASK_SP
)
751 VSX_VECTOR_MOVE(xvnabssp
, OP_NABS
, SGN_MASK_SP
)
752 VSX_VECTOR_MOVE(xvnegsp
, OP_NEG
, SGN_MASK_SP
)
753 VSX_VECTOR_MOVE(xvcpsgnsp
, OP_CPSGN
, SGN_MASK_SP
)
/*
 * Generic VSX generator: pass the raw opcode to the out-of-line helper,
 * which decodes the register fields itself.
 */
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}
/* Generator for helpers taking (XT high, env, XB high) directly. */
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)  \
static void gen_##name(DisasContext * ctx)                     \
{                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                         \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
        return;                                                \
    }                                                          \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,      \
                      cpu_vsrh(xB(ctx->opcode)));              \
}
779 GEN_VSX_HELPER_2(xsadddp
, 0x00, 0x04, 0, PPC2_VSX
)
780 GEN_VSX_HELPER_2(xsaddqp
, 0x04, 0x00, 0, PPC2_ISA300
)
781 GEN_VSX_HELPER_2(xssubdp
, 0x00, 0x05, 0, PPC2_VSX
)
782 GEN_VSX_HELPER_2(xsmuldp
, 0x00, 0x06, 0, PPC2_VSX
)
783 GEN_VSX_HELPER_2(xsmulqp
, 0x04, 0x01, 0, PPC2_ISA300
)
784 GEN_VSX_HELPER_2(xsdivdp
, 0x00, 0x07, 0, PPC2_VSX
)
785 GEN_VSX_HELPER_2(xsdivqp
, 0x04, 0x11, 0, PPC2_ISA300
)
786 GEN_VSX_HELPER_2(xsredp
, 0x14, 0x05, 0, PPC2_VSX
)
787 GEN_VSX_HELPER_2(xssqrtdp
, 0x16, 0x04, 0, PPC2_VSX
)
788 GEN_VSX_HELPER_2(xsrsqrtedp
, 0x14, 0x04, 0, PPC2_VSX
)
789 GEN_VSX_HELPER_2(xstdivdp
, 0x14, 0x07, 0, PPC2_VSX
)
790 GEN_VSX_HELPER_2(xstsqrtdp
, 0x14, 0x06, 0, PPC2_VSX
)
791 GEN_VSX_HELPER_2(xsmaddadp
, 0x04, 0x04, 0, PPC2_VSX
)
792 GEN_VSX_HELPER_2(xsmaddmdp
, 0x04, 0x05, 0, PPC2_VSX
)
793 GEN_VSX_HELPER_2(xsmsubadp
, 0x04, 0x06, 0, PPC2_VSX
)
794 GEN_VSX_HELPER_2(xsmsubmdp
, 0x04, 0x07, 0, PPC2_VSX
)
795 GEN_VSX_HELPER_2(xsnmaddadp
, 0x04, 0x14, 0, PPC2_VSX
)
796 GEN_VSX_HELPER_2(xsnmaddmdp
, 0x04, 0x15, 0, PPC2_VSX
)
797 GEN_VSX_HELPER_2(xsnmsubadp
, 0x04, 0x16, 0, PPC2_VSX
)
798 GEN_VSX_HELPER_2(xsnmsubmdp
, 0x04, 0x17, 0, PPC2_VSX
)
799 GEN_VSX_HELPER_2(xscmpeqdp
, 0x0C, 0x00, 0, PPC2_ISA300
)
800 GEN_VSX_HELPER_2(xscmpgtdp
, 0x0C, 0x01, 0, PPC2_ISA300
)
801 GEN_VSX_HELPER_2(xscmpgedp
, 0x0C, 0x02, 0, PPC2_ISA300
)
802 GEN_VSX_HELPER_2(xscmpnedp
, 0x0C, 0x03, 0, PPC2_ISA300
)
803 GEN_VSX_HELPER_2(xscmpexpdp
, 0x0C, 0x07, 0, PPC2_ISA300
)
804 GEN_VSX_HELPER_2(xscmpexpqp
, 0x04, 0x05, 0, PPC2_ISA300
)
805 GEN_VSX_HELPER_2(xscmpodp
, 0x0C, 0x05, 0, PPC2_VSX
)
806 GEN_VSX_HELPER_2(xscmpudp
, 0x0C, 0x04, 0, PPC2_VSX
)
807 GEN_VSX_HELPER_2(xscmpoqp
, 0x04, 0x04, 0, PPC2_VSX
)
808 GEN_VSX_HELPER_2(xscmpuqp
, 0x04, 0x14, 0, PPC2_VSX
)
809 GEN_VSX_HELPER_2(xsmaxdp
, 0x00, 0x14, 0, PPC2_VSX
)
810 GEN_VSX_HELPER_2(xsmindp
, 0x00, 0x15, 0, PPC2_VSX
)
811 GEN_VSX_HELPER_2(xscvdphp
, 0x16, 0x15, 0x11, PPC2_ISA300
)
812 GEN_VSX_HELPER_2(xscvdpsp
, 0x12, 0x10, 0, PPC2_VSX
)
813 GEN_VSX_HELPER_2(xscvdpqp
, 0x04, 0x1A, 0x16, PPC2_ISA300
)
814 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn
, 0x16, 0x10, 0, PPC2_VSX207
)
815 GEN_VSX_HELPER_2(xscvqpdp
, 0x04, 0x1A, 0x14, PPC2_ISA300
)
816 GEN_VSX_HELPER_2(xscvqpsdz
, 0x04, 0x1A, 0x19, PPC2_ISA300
)
817 GEN_VSX_HELPER_2(xscvqpswz
, 0x04, 0x1A, 0x09, PPC2_ISA300
)
818 GEN_VSX_HELPER_2(xscvhpdp
, 0x16, 0x15, 0x10, PPC2_ISA300
)
819 GEN_VSX_HELPER_2(xscvsdqp
, 0x04, 0x1A, 0x0A, PPC2_ISA300
)
820 GEN_VSX_HELPER_2(xscvspdp
, 0x12, 0x14, 0, PPC2_VSX
)
821 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn
, 0x16, 0x14, 0, PPC2_VSX207
)
822 GEN_VSX_HELPER_2(xscvdpsxds
, 0x10, 0x15, 0, PPC2_VSX
)
823 GEN_VSX_HELPER_2(xscvdpsxws
, 0x10, 0x05, 0, PPC2_VSX
)
824 GEN_VSX_HELPER_2(xscvdpuxds
, 0x10, 0x14, 0, PPC2_VSX
)
825 GEN_VSX_HELPER_2(xscvdpuxws
, 0x10, 0x04, 0, PPC2_VSX
)
826 GEN_VSX_HELPER_2(xscvsxddp
, 0x10, 0x17, 0, PPC2_VSX
)
827 GEN_VSX_HELPER_2(xscvudqp
, 0x04, 0x1A, 0x02, PPC2_ISA300
)
828 GEN_VSX_HELPER_2(xscvuxddp
, 0x10, 0x16, 0, PPC2_VSX
)
829 GEN_VSX_HELPER_2(xsrdpi
, 0x12, 0x04, 0, PPC2_VSX
)
830 GEN_VSX_HELPER_2(xsrdpic
, 0x16, 0x06, 0, PPC2_VSX
)
831 GEN_VSX_HELPER_2(xsrdpim
, 0x12, 0x07, 0, PPC2_VSX
)
832 GEN_VSX_HELPER_2(xsrdpip
, 0x12, 0x06, 0, PPC2_VSX
)
833 GEN_VSX_HELPER_2(xsrdpiz
, 0x12, 0x05, 0, PPC2_VSX
)
834 GEN_VSX_HELPER_XT_XB_ENV(xsrsp
, 0x12, 0x11, 0, PPC2_VSX207
)
836 GEN_VSX_HELPER_2(xsaddsp
, 0x00, 0x00, 0, PPC2_VSX207
)
837 GEN_VSX_HELPER_2(xssubsp
, 0x00, 0x01, 0, PPC2_VSX207
)
838 GEN_VSX_HELPER_2(xsmulsp
, 0x00, 0x02, 0, PPC2_VSX207
)
839 GEN_VSX_HELPER_2(xsdivsp
, 0x00, 0x03, 0, PPC2_VSX207
)
840 GEN_VSX_HELPER_2(xsresp
, 0x14, 0x01, 0, PPC2_VSX207
)
841 GEN_VSX_HELPER_2(xssqrtsp
, 0x16, 0x00, 0, PPC2_VSX207
)
842 GEN_VSX_HELPER_2(xsrsqrtesp
, 0x14, 0x00, 0, PPC2_VSX207
)
843 GEN_VSX_HELPER_2(xsmaddasp
, 0x04, 0x00, 0, PPC2_VSX207
)
844 GEN_VSX_HELPER_2(xsmaddmsp
, 0x04, 0x01, 0, PPC2_VSX207
)
845 GEN_VSX_HELPER_2(xsmsubasp
, 0x04, 0x02, 0, PPC2_VSX207
)
846 GEN_VSX_HELPER_2(xsmsubmsp
, 0x04, 0x03, 0, PPC2_VSX207
)
847 GEN_VSX_HELPER_2(xsnmaddasp
, 0x04, 0x10, 0, PPC2_VSX207
)
848 GEN_VSX_HELPER_2(xsnmaddmsp
, 0x04, 0x11, 0, PPC2_VSX207
)
849 GEN_VSX_HELPER_2(xsnmsubasp
, 0x04, 0x12, 0, PPC2_VSX207
)
850 GEN_VSX_HELPER_2(xsnmsubmsp
, 0x04, 0x13, 0, PPC2_VSX207
)
851 GEN_VSX_HELPER_2(xscvsxdsp
, 0x10, 0x13, 0, PPC2_VSX207
)
852 GEN_VSX_HELPER_2(xscvuxdsp
, 0x10, 0x12, 0, PPC2_VSX207
)
853 GEN_VSX_HELPER_2(xststdcsp
, 0x14, 0x12, 0, PPC2_ISA300
)
854 GEN_VSX_HELPER_2(xststdcdp
, 0x14, 0x16, 0, PPC2_ISA300
)
855 GEN_VSX_HELPER_2(xststdcqp
, 0x04, 0x16, 0, PPC2_ISA300
)
857 GEN_VSX_HELPER_2(xvadddp
, 0x00, 0x0C, 0, PPC2_VSX
)
858 GEN_VSX_HELPER_2(xvsubdp
, 0x00, 0x0D, 0, PPC2_VSX
)
859 GEN_VSX_HELPER_2(xvmuldp
, 0x00, 0x0E, 0, PPC2_VSX
)
860 GEN_VSX_HELPER_2(xvdivdp
, 0x00, 0x0F, 0, PPC2_VSX
)
861 GEN_VSX_HELPER_2(xvredp
, 0x14, 0x0D, 0, PPC2_VSX
)
862 GEN_VSX_HELPER_2(xvsqrtdp
, 0x16, 0x0C, 0, PPC2_VSX
)
863 GEN_VSX_HELPER_2(xvrsqrtedp
, 0x14, 0x0C, 0, PPC2_VSX
)
864 GEN_VSX_HELPER_2(xvtdivdp
, 0x14, 0x0F, 0, PPC2_VSX
)
865 GEN_VSX_HELPER_2(xvtsqrtdp
, 0x14, 0x0E, 0, PPC2_VSX
)
866 GEN_VSX_HELPER_2(xvmaddadp
, 0x04, 0x0C, 0, PPC2_VSX
)
867 GEN_VSX_HELPER_2(xvmaddmdp
, 0x04, 0x0D, 0, PPC2_VSX
)
868 GEN_VSX_HELPER_2(xvmsubadp
, 0x04, 0x0E, 0, PPC2_VSX
)
869 GEN_VSX_HELPER_2(xvmsubmdp
, 0x04, 0x0F, 0, PPC2_VSX
)
870 GEN_VSX_HELPER_2(xvnmaddadp
, 0x04, 0x1C, 0, PPC2_VSX
)
871 GEN_VSX_HELPER_2(xvnmaddmdp
, 0x04, 0x1D, 0, PPC2_VSX
)
872 GEN_VSX_HELPER_2(xvnmsubadp
, 0x04, 0x1E, 0, PPC2_VSX
)
873 GEN_VSX_HELPER_2(xvnmsubmdp
, 0x04, 0x1F, 0, PPC2_VSX
)
874 GEN_VSX_HELPER_2(xvmaxdp
, 0x00, 0x1C, 0, PPC2_VSX
)
875 GEN_VSX_HELPER_2(xvmindp
, 0x00, 0x1D, 0, PPC2_VSX
)
876 GEN_VSX_HELPER_2(xvcmpeqdp
, 0x0C, 0x0C, 0, PPC2_VSX
)
877 GEN_VSX_HELPER_2(xvcmpgtdp
, 0x0C, 0x0D, 0, PPC2_VSX
)
878 GEN_VSX_HELPER_2(xvcmpgedp
, 0x0C, 0x0E, 0, PPC2_VSX
)
879 GEN_VSX_HELPER_2(xvcmpnedp
, 0x0C, 0x0F, 0, PPC2_ISA300
)
880 GEN_VSX_HELPER_2(xvcvdpsp
, 0x12, 0x18, 0, PPC2_VSX
)
881 GEN_VSX_HELPER_2(xvcvdpsxds
, 0x10, 0x1D, 0, PPC2_VSX
)
882 GEN_VSX_HELPER_2(xvcvdpsxws
, 0x10, 0x0D, 0, PPC2_VSX
)
883 GEN_VSX_HELPER_2(xvcvdpuxds
, 0x10, 0x1C, 0, PPC2_VSX
)
884 GEN_VSX_HELPER_2(xvcvdpuxws
, 0x10, 0x0C, 0, PPC2_VSX
)
885 GEN_VSX_HELPER_2(xvcvsxddp
, 0x10, 0x1F, 0, PPC2_VSX
)
886 GEN_VSX_HELPER_2(xvcvuxddp
, 0x10, 0x1E, 0, PPC2_VSX
)
887 GEN_VSX_HELPER_2(xvcvsxwdp
, 0x10, 0x0F, 0, PPC2_VSX
)
888 GEN_VSX_HELPER_2(xvcvuxwdp
, 0x10, 0x0E, 0, PPC2_VSX
)
889 GEN_VSX_HELPER_2(xvrdpi
, 0x12, 0x0C, 0, PPC2_VSX
)
890 GEN_VSX_HELPER_2(xvrdpic
, 0x16, 0x0E, 0, PPC2_VSX
)
891 GEN_VSX_HELPER_2(xvrdpim
, 0x12, 0x0F, 0, PPC2_VSX
)
892 GEN_VSX_HELPER_2(xvrdpip
, 0x12, 0x0E, 0, PPC2_VSX
)
893 GEN_VSX_HELPER_2(xvrdpiz
, 0x12, 0x0D, 0, PPC2_VSX
)
895 GEN_VSX_HELPER_2(xvaddsp
, 0x00, 0x08, 0, PPC2_VSX
)
896 GEN_VSX_HELPER_2(xvsubsp
, 0x00, 0x09, 0, PPC2_VSX
)
897 GEN_VSX_HELPER_2(xvmulsp
, 0x00, 0x0A, 0, PPC2_VSX
)
898 GEN_VSX_HELPER_2(xvdivsp
, 0x00, 0x0B, 0, PPC2_VSX
)
899 GEN_VSX_HELPER_2(xvresp
, 0x14, 0x09, 0, PPC2_VSX
)
900 GEN_VSX_HELPER_2(xvsqrtsp
, 0x16, 0x08, 0, PPC2_VSX
)
901 GEN_VSX_HELPER_2(xvrsqrtesp
, 0x14, 0x08, 0, PPC2_VSX
)
902 GEN_VSX_HELPER_2(xvtdivsp
, 0x14, 0x0B, 0, PPC2_VSX
)
903 GEN_VSX_HELPER_2(xvtsqrtsp
, 0x14, 0x0A, 0, PPC2_VSX
)
904 GEN_VSX_HELPER_2(xvmaddasp
, 0x04, 0x08, 0, PPC2_VSX
)
905 GEN_VSX_HELPER_2(xvmaddmsp
, 0x04, 0x09, 0, PPC2_VSX
)
906 GEN_VSX_HELPER_2(xvmsubasp
, 0x04, 0x0A, 0, PPC2_VSX
)
907 GEN_VSX_HELPER_2(xvmsubmsp
, 0x04, 0x0B, 0, PPC2_VSX
)
908 GEN_VSX_HELPER_2(xvnmaddasp
, 0x04, 0x18, 0, PPC2_VSX
)
909 GEN_VSX_HELPER_2(xvnmaddmsp
, 0x04, 0x19, 0, PPC2_VSX
)
910 GEN_VSX_HELPER_2(xvnmsubasp
, 0x04, 0x1A, 0, PPC2_VSX
)
911 GEN_VSX_HELPER_2(xvnmsubmsp
, 0x04, 0x1B, 0, PPC2_VSX
)
912 GEN_VSX_HELPER_2(xvmaxsp
, 0x00, 0x18, 0, PPC2_VSX
)
913 GEN_VSX_HELPER_2(xvminsp
, 0x00, 0x19, 0, PPC2_VSX
)
914 GEN_VSX_HELPER_2(xvcmpeqsp
, 0x0C, 0x08, 0, PPC2_VSX
)
915 GEN_VSX_HELPER_2(xvcmpgtsp
, 0x0C, 0x09, 0, PPC2_VSX
)
916 GEN_VSX_HELPER_2(xvcmpgesp
, 0x0C, 0x0A, 0, PPC2_VSX
)
917 GEN_VSX_HELPER_2(xvcmpnesp
, 0x0C, 0x0B, 0, PPC2_VSX
)
918 GEN_VSX_HELPER_2(xvcvspdp
, 0x12, 0x1C, 0, PPC2_VSX
)
919 GEN_VSX_HELPER_2(xvcvhpsp
, 0x16, 0x1D, 0x18, PPC2_ISA300
)
920 GEN_VSX_HELPER_2(xvcvsphp
, 0x16, 0x1D, 0x19, PPC2_ISA300
)
921 GEN_VSX_HELPER_2(xvcvspsxds
, 0x10, 0x19, 0, PPC2_VSX
)
922 GEN_VSX_HELPER_2(xvcvspsxws
, 0x10, 0x09, 0, PPC2_VSX
)
923 GEN_VSX_HELPER_2(xvcvspuxds
, 0x10, 0x18, 0, PPC2_VSX
)
924 GEN_VSX_HELPER_2(xvcvspuxws
, 0x10, 0x08, 0, PPC2_VSX
)
925 GEN_VSX_HELPER_2(xvcvsxdsp
, 0x10, 0x1B, 0, PPC2_VSX
)
926 GEN_VSX_HELPER_2(xvcvuxdsp
, 0x10, 0x1A, 0, PPC2_VSX
)
927 GEN_VSX_HELPER_2(xvcvsxwsp
, 0x10, 0x0B, 0, PPC2_VSX
)
928 GEN_VSX_HELPER_2(xvcvuxwsp
, 0x10, 0x0A, 0, PPC2_VSX
)
929 GEN_VSX_HELPER_2(xvrspi
, 0x12, 0x08, 0, PPC2_VSX
)
930 GEN_VSX_HELPER_2(xvrspic
, 0x16, 0x0A, 0, PPC2_VSX
)
931 GEN_VSX_HELPER_2(xvrspim
, 0x12, 0x0B, 0, PPC2_VSX
)
932 GEN_VSX_HELPER_2(xvrspip
, 0x12, 0x0A, 0, PPC2_VSX
)
933 GEN_VSX_HELPER_2(xvrspiz
, 0x12, 0x09, 0, PPC2_VSX
)
934 GEN_VSX_HELPER_2(xvtstdcsp
, 0x14, 0x1A, 0, PPC2_VSX
)
935 GEN_VSX_HELPER_2(xvtstdcdp
, 0x14, 0x1E, 0, PPC2_VSX
)
936 GEN_VSX_HELPER_2(xxperm
, 0x08, 0x03, 0, PPC2_ISA300
)
937 GEN_VSX_HELPER_2(xxpermr
, 0x08, 0x07, 0, PPC2_ISA300
)
939 static void gen_xxbrd(DisasContext
*ctx
)
941 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
942 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
943 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
944 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
946 if (unlikely(!ctx
->vsx_enabled
)) {
947 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
950 tcg_gen_bswap64_i64(xth
, xbh
);
951 tcg_gen_bswap64_i64(xtl
, xbl
);
954 static void gen_xxbrh(DisasContext
*ctx
)
956 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
957 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
958 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
959 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
961 if (unlikely(!ctx
->vsx_enabled
)) {
962 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
965 gen_bswap16x8(xth
, xtl
, xbh
, xbl
);
968 static void gen_xxbrq(DisasContext
*ctx
)
970 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
971 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
972 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
973 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
974 TCGv_i64 t0
= tcg_temp_new_i64();
976 if (unlikely(!ctx
->vsx_enabled
)) {
977 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
980 tcg_gen_bswap64_i64(t0
, xbl
);
981 tcg_gen_bswap64_i64(xtl
, xbh
);
982 tcg_gen_mov_i64(xth
, t0
);
983 tcg_temp_free_i64(t0
);
986 static void gen_xxbrw(DisasContext
*ctx
)
988 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
989 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
990 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
991 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
993 if (unlikely(!ctx
->vsx_enabled
)) {
994 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
997 gen_bswap32x4(xth
, xtl
, xbh
, xbl
);
/* 128-bit bitwise logical op: apply tcg_op independently to both halves. */
#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
               cpu_vsrh(xB(ctx->opcode)));                           \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
               cpu_vsrl(xB(ctx->opcode)));                           \
    }
1013 VSX_LOGICAL(xxland
, tcg_gen_and_i64
)
1014 VSX_LOGICAL(xxlandc
, tcg_gen_andc_i64
)
1015 VSX_LOGICAL(xxlor
, tcg_gen_or_i64
)
1016 VSX_LOGICAL(xxlxor
, tcg_gen_xor_i64
)
1017 VSX_LOGICAL(xxlnor
, tcg_gen_nor_i64
)
1018 VSX_LOGICAL(xxleqv
, tcg_gen_eqv_i64
)
1019 VSX_LOGICAL(xxlnand
, tcg_gen_nand_i64
)
1020 VSX_LOGICAL(xxlorc
, tcg_gen_orc_i64
)
/*
 * xxmrghw/xxmrglw: merge the high (high=1) or low (high=0) words of the
 * doublewords of XA and XB.  Copies to temporaries first so XT may alias a
 * source.  (deposit argument order reconstructed — verify against upstream.)
 */
#define VSX_XXMRG(name, high)                                \
static void glue(gen_, name)(DisasContext * ctx)             \
    {                                                        \
        TCGv_i64 a0, a1, b0, b1;                             \
        if (unlikely(!ctx->vsx_enabled)) {                   \
            gen_exception(ctx, POWERPC_EXCP_VSXU);           \
            return;                                          \
        }                                                    \
        a0 = tcg_temp_new_i64();                             \
        a1 = tcg_temp_new_i64();                             \
        b0 = tcg_temp_new_i64();                             \
        b1 = tcg_temp_new_i64();                             \
        if (high) {                                          \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));  \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));  \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));  \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));  \
        } else {                                             \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));  \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));  \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));  \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));  \
        }                                                    \
        tcg_gen_shri_i64(a0, a0, 32);                        \
        tcg_gen_shri_i64(b0, b0, 32);                        \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),       \
                            b0, a0, 32, 32);                 \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),       \
                            b1, a1, 32, 32);                 \
        tcg_temp_free_i64(a0);                               \
        tcg_temp_free_i64(a1);                               \
        tcg_temp_free_i64(b0);                               \
        tcg_temp_free_i64(b1);                               \
    }
1057 VSX_XXMRG(xxmrghw
, 1)
1058 VSX_XXMRG(xxmrglw
, 0)
1060 static void gen_xxsel(DisasContext
* ctx
)
1063 if (unlikely(!ctx
->vsx_enabled
)) {
1064 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1067 a
= tcg_temp_new_i64();
1068 b
= tcg_temp_new_i64();
1069 c
= tcg_temp_new_i64();
1071 tcg_gen_mov_i64(a
, cpu_vsrh(xA(ctx
->opcode
)));
1072 tcg_gen_mov_i64(b
, cpu_vsrh(xB(ctx
->opcode
)));
1073 tcg_gen_mov_i64(c
, cpu_vsrh(xC(ctx
->opcode
)));
1075 tcg_gen_and_i64(b
, b
, c
);
1076 tcg_gen_andc_i64(a
, a
, c
);
1077 tcg_gen_or_i64(cpu_vsrh(xT(ctx
->opcode
)), a
, b
);
1079 tcg_gen_mov_i64(a
, cpu_vsrl(xA(ctx
->opcode
)));
1080 tcg_gen_mov_i64(b
, cpu_vsrl(xB(ctx
->opcode
)));
1081 tcg_gen_mov_i64(c
, cpu_vsrl(xC(ctx
->opcode
)));
1083 tcg_gen_and_i64(b
, b
, c
);
1084 tcg_gen_andc_i64(a
, a
, c
);
1085 tcg_gen_or_i64(cpu_vsrl(xT(ctx
->opcode
)), a
, b
);
1087 tcg_temp_free_i64(a
);
1088 tcg_temp_free_i64(b
);
1089 tcg_temp_free_i64(c
);
1092 static void gen_xxspltw(DisasContext
*ctx
)
1095 TCGv_i64 vsr
= (UIM(ctx
->opcode
) & 2) ?
1096 cpu_vsrl(xB(ctx
->opcode
)) :
1097 cpu_vsrh(xB(ctx
->opcode
));
1099 if (unlikely(!ctx
->vsx_enabled
)) {
1100 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1104 b
= tcg_temp_new_i64();
1105 b2
= tcg_temp_new_i64();
1107 if (UIM(ctx
->opcode
) & 1) {
1108 tcg_gen_ext32u_i64(b
, vsr
);
1110 tcg_gen_shri_i64(b
, vsr
, 32);
1113 tcg_gen_shli_i64(b2
, b
, 32);
1114 tcg_gen_or_i64(cpu_vsrh(xT(ctx
->opcode
)), b
, b2
);
1115 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xT(ctx
->opcode
)));
1117 tcg_temp_free_i64(b
);
1118 tcg_temp_free_i64(b2
);
1121 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
1123 static void gen_xxspltib(DisasContext
*ctx
)
1125 unsigned char uim8
= IMM8(ctx
->opcode
);
1126 if (xS(ctx
->opcode
) < 32) {
1127 if (unlikely(!ctx
->altivec_enabled
)) {
1128 gen_exception(ctx
, POWERPC_EXCP_VPU
);
1132 if (unlikely(!ctx
->vsx_enabled
)) {
1133 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1137 tcg_gen_movi_i64(cpu_vsrh(xT(ctx
->opcode
)), pattern(uim8
));
1138 tcg_gen_movi_i64(cpu_vsrl(xT(ctx
->opcode
)), pattern(uim8
));
1141 static void gen_xxsldwi(DisasContext
*ctx
)
1144 if (unlikely(!ctx
->vsx_enabled
)) {
1145 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1148 xth
= tcg_temp_new_i64();
1149 xtl
= tcg_temp_new_i64();
1151 switch (SHW(ctx
->opcode
)) {
1153 tcg_gen_mov_i64(xth
, cpu_vsrh(xA(ctx
->opcode
)));
1154 tcg_gen_mov_i64(xtl
, cpu_vsrl(xA(ctx
->opcode
)));
1158 TCGv_i64 t0
= tcg_temp_new_i64();
1159 tcg_gen_mov_i64(xth
, cpu_vsrh(xA(ctx
->opcode
)));
1160 tcg_gen_shli_i64(xth
, xth
, 32);
1161 tcg_gen_mov_i64(t0
, cpu_vsrl(xA(ctx
->opcode
)));
1162 tcg_gen_shri_i64(t0
, t0
, 32);
1163 tcg_gen_or_i64(xth
, xth
, t0
);
1164 tcg_gen_mov_i64(xtl
, cpu_vsrl(xA(ctx
->opcode
)));
1165 tcg_gen_shli_i64(xtl
, xtl
, 32);
1166 tcg_gen_mov_i64(t0
, cpu_vsrh(xB(ctx
->opcode
)));
1167 tcg_gen_shri_i64(t0
, t0
, 32);
1168 tcg_gen_or_i64(xtl
, xtl
, t0
);
1169 tcg_temp_free_i64(t0
);
1173 tcg_gen_mov_i64(xth
, cpu_vsrl(xA(ctx
->opcode
)));
1174 tcg_gen_mov_i64(xtl
, cpu_vsrh(xB(ctx
->opcode
)));
1178 TCGv_i64 t0
= tcg_temp_new_i64();
1179 tcg_gen_mov_i64(xth
, cpu_vsrl(xA(ctx
->opcode
)));
1180 tcg_gen_shli_i64(xth
, xth
, 32);
1181 tcg_gen_mov_i64(t0
, cpu_vsrh(xB(ctx
->opcode
)));
1182 tcg_gen_shri_i64(t0
, t0
, 32);
1183 tcg_gen_or_i64(xth
, xth
, t0
);
1184 tcg_gen_mov_i64(xtl
, cpu_vsrh(xB(ctx
->opcode
)));
1185 tcg_gen_shli_i64(xtl
, xtl
, 32);
1186 tcg_gen_mov_i64(t0
, cpu_vsrl(xB(ctx
->opcode
)));
1187 tcg_gen_shri_i64(t0
, t0
, 32);
1188 tcg_gen_or_i64(xtl
, xtl
, t0
);
1189 tcg_temp_free_i64(t0
);
1194 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), xth
);
1195 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), xtl
);
1197 tcg_temp_free_i64(xth
);
1198 tcg_temp_free_i64(xtl
);
/*
 * Emit xxextractuw/xxinsertw via an out-of-line helper.  uimm values
 * above 15 are out of bounds and zero the target; values 13..15 are
 * handled in the helper per hardware behavior.
 * NOTE(review): the xt/xb declaration, the uimm bounds check, and the
 * closing braces were lost in extraction; restored from upstream —
 * confirm against the repository copy.
 */
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv xt, xb;                                                \
    TCGv_i32 t0 = tcg_temp_new_i32();                           \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                         \
    xb = tcg_const_tl(xB(ctx->opcode));                         \
    /* uimm > 15 out of bound and for                           \
     * uimm > 12 handle as per hardware in helper               \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);         \
        tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), 0);         \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free(xb);                                          \
    tcg_temp_free(xt);                                          \
    tcg_temp_free_i32(t0);                                      \
}
/* Word extract (zero-extended) and word insert, both helper-backed. */
VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)
1233 static void gen_xsxexpdp(DisasContext
*ctx
)
1235 TCGv rt
= cpu_gpr
[rD(ctx
->opcode
)];
1236 if (unlikely(!ctx
->vsx_enabled
)) {
1237 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1240 tcg_gen_shri_i64(rt
, cpu_vsrh(xB(ctx
->opcode
)), 52);
1241 tcg_gen_andi_i64(rt
, rt
, 0x7FF);
1244 static void gen_xsxexpqp(DisasContext
*ctx
)
1246 TCGv_i64 xth
= cpu_vsrh(rD(ctx
->opcode
) + 32);
1247 TCGv_i64 xtl
= cpu_vsrl(rD(ctx
->opcode
) + 32);
1248 TCGv_i64 xbh
= cpu_vsrh(rB(ctx
->opcode
) + 32);
1250 if (unlikely(!ctx
->vsx_enabled
)) {
1251 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1254 tcg_gen_shri_i64(xth
, xbh
, 48);
1255 tcg_gen_andi_i64(xth
, xth
, 0x7FFF);
1256 tcg_gen_movi_i64(xtl
, 0);
1259 static void gen_xsiexpdp(DisasContext
*ctx
)
1261 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
1262 TCGv ra
= cpu_gpr
[rA(ctx
->opcode
)];
1263 TCGv rb
= cpu_gpr
[rB(ctx
->opcode
)];
1266 if (unlikely(!ctx
->vsx_enabled
)) {
1267 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1270 t0
= tcg_temp_new_i64();
1271 tcg_gen_andi_i64(xth
, ra
, 0x800FFFFFFFFFFFFF);
1272 tcg_gen_andi_i64(t0
, rb
, 0x7FF);
1273 tcg_gen_shli_i64(t0
, t0
, 52);
1274 tcg_gen_or_i64(xth
, xth
, t0
);
1275 /* dword[1] is undefined */
1276 tcg_temp_free_i64(t0
);
1279 static void gen_xsiexpqp(DisasContext
*ctx
)
1281 TCGv_i64 xth
= cpu_vsrh(rD(ctx
->opcode
) + 32);
1282 TCGv_i64 xtl
= cpu_vsrl(rD(ctx
->opcode
) + 32);
1283 TCGv_i64 xah
= cpu_vsrh(rA(ctx
->opcode
) + 32);
1284 TCGv_i64 xal
= cpu_vsrl(rA(ctx
->opcode
) + 32);
1285 TCGv_i64 xbh
= cpu_vsrh(rB(ctx
->opcode
) + 32);
1288 if (unlikely(!ctx
->vsx_enabled
)) {
1289 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1292 t0
= tcg_temp_new_i64();
1293 tcg_gen_andi_i64(xth
, xah
, 0x8000FFFFFFFFFFFF);
1294 tcg_gen_andi_i64(t0
, xbh
, 0x7FFF);
1295 tcg_gen_shli_i64(t0
, t0
, 48);
1296 tcg_gen_or_i64(xth
, xth
, t0
);
1297 tcg_gen_mov_i64(xtl
, xal
);
1298 tcg_temp_free_i64(t0
);
1301 static void gen_xsxsigdp(DisasContext
*ctx
)
1303 TCGv rt
= cpu_gpr
[rD(ctx
->opcode
)];
1304 TCGv_i64 t0
, zr
, nan
, exp
;
1306 if (unlikely(!ctx
->vsx_enabled
)) {
1307 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1310 exp
= tcg_temp_new_i64();
1311 t0
= tcg_temp_new_i64();
1312 zr
= tcg_const_i64(0);
1313 nan
= tcg_const_i64(2047);
1315 tcg_gen_shri_i64(exp
, cpu_vsrh(xB(ctx
->opcode
)), 52);
1316 tcg_gen_andi_i64(exp
, exp
, 0x7FF);
1317 tcg_gen_movi_i64(t0
, 0x0010000000000000);
1318 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, zr
, zr
, t0
);
1319 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, nan
, zr
, t0
);
1320 tcg_gen_andi_i64(rt
, cpu_vsrh(xB(ctx
->opcode
)), 0x000FFFFFFFFFFFFF);
1321 tcg_gen_or_i64(rt
, rt
, t0
);
1323 tcg_temp_free_i64(t0
);
1324 tcg_temp_free_i64(exp
);
1325 tcg_temp_free_i64(zr
);
1326 tcg_temp_free_i64(nan
);
1329 static void gen_xsxsigqp(DisasContext
*ctx
)
1331 TCGv_i64 t0
, zr
, nan
, exp
;
1332 TCGv_i64 xth
= cpu_vsrh(rD(ctx
->opcode
) + 32);
1333 TCGv_i64 xtl
= cpu_vsrl(rD(ctx
->opcode
) + 32);
1335 if (unlikely(!ctx
->vsx_enabled
)) {
1336 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1339 exp
= tcg_temp_new_i64();
1340 t0
= tcg_temp_new_i64();
1341 zr
= tcg_const_i64(0);
1342 nan
= tcg_const_i64(32767);
1344 tcg_gen_shri_i64(exp
, cpu_vsrh(rB(ctx
->opcode
) + 32), 48);
1345 tcg_gen_andi_i64(exp
, exp
, 0x7FFF);
1346 tcg_gen_movi_i64(t0
, 0x0001000000000000);
1347 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, zr
, zr
, t0
);
1348 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, nan
, zr
, t0
);
1349 tcg_gen_andi_i64(xth
, cpu_vsrh(rB(ctx
->opcode
) + 32), 0x0000FFFFFFFFFFFF);
1350 tcg_gen_or_i64(xth
, xth
, t0
);
1351 tcg_gen_mov_i64(xtl
, cpu_vsrl(rB(ctx
->opcode
) + 32));
1353 tcg_temp_free_i64(t0
);
1354 tcg_temp_free_i64(exp
);
1355 tcg_temp_free_i64(zr
);
1356 tcg_temp_free_i64(nan
);
1360 static void gen_xviexpsp(DisasContext
*ctx
)
1362 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
1363 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
1364 TCGv_i64 xah
= cpu_vsrh(xA(ctx
->opcode
));
1365 TCGv_i64 xal
= cpu_vsrl(xA(ctx
->opcode
));
1366 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
1367 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
1370 if (unlikely(!ctx
->vsx_enabled
)) {
1371 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1374 t0
= tcg_temp_new_i64();
1375 tcg_gen_andi_i64(xth
, xah
, 0x807FFFFF807FFFFF);
1376 tcg_gen_andi_i64(t0
, xbh
, 0xFF000000FF);
1377 tcg_gen_shli_i64(t0
, t0
, 23);
1378 tcg_gen_or_i64(xth
, xth
, t0
);
1379 tcg_gen_andi_i64(xtl
, xal
, 0x807FFFFF807FFFFF);
1380 tcg_gen_andi_i64(t0
, xbl
, 0xFF000000FF);
1381 tcg_gen_shli_i64(t0
, t0
, 23);
1382 tcg_gen_or_i64(xtl
, xtl
, t0
);
1383 tcg_temp_free_i64(t0
);
1386 static void gen_xviexpdp(DisasContext
*ctx
)
1388 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
1389 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
1390 TCGv_i64 xah
= cpu_vsrh(xA(ctx
->opcode
));
1391 TCGv_i64 xal
= cpu_vsrl(xA(ctx
->opcode
));
1392 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
1393 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
1396 if (unlikely(!ctx
->vsx_enabled
)) {
1397 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1400 t0
= tcg_temp_new_i64();
1401 tcg_gen_andi_i64(xth
, xah
, 0x800FFFFFFFFFFFFF);
1402 tcg_gen_andi_i64(t0
, xbh
, 0x7FF);
1403 tcg_gen_shli_i64(t0
, t0
, 52);
1404 tcg_gen_or_i64(xth
, xth
, t0
);
1405 tcg_gen_andi_i64(xtl
, xal
, 0x800FFFFFFFFFFFFF);
1406 tcg_gen_andi_i64(t0
, xbl
, 0x7FF);
1407 tcg_gen_shli_i64(t0
, t0
, 52);
1408 tcg_gen_or_i64(xtl
, xtl
, t0
);
1409 tcg_temp_free_i64(t0
);
1412 static void gen_xvxexpsp(DisasContext
*ctx
)
1414 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
1415 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
1416 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
1417 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
1419 if (unlikely(!ctx
->vsx_enabled
)) {
1420 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1423 tcg_gen_shri_i64(xth
, xbh
, 23);
1424 tcg_gen_andi_i64(xth
, xth
, 0xFF000000FF);
1425 tcg_gen_shri_i64(xtl
, xbl
, 23);
1426 tcg_gen_andi_i64(xtl
, xtl
, 0xFF000000FF);
1429 static void gen_xvxexpdp(DisasContext
*ctx
)
1431 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
1432 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
1433 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
1434 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
1436 if (unlikely(!ctx
->vsx_enabled
)) {
1437 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1440 tcg_gen_shri_i64(xth
, xbh
, 52);
1441 tcg_gen_andi_i64(xth
, xth
, 0x7FF);
1442 tcg_gen_shri_i64(xtl
, xbl
, 52);
1443 tcg_gen_andi_i64(xtl
, xtl
, 0x7FF);
/* xvxsigsp (extract SP significands) is emitted via an ISA 3.00 helper. */
GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
1448 static void gen_xvxsigdp(DisasContext
*ctx
)
1450 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
1451 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
1452 TCGv_i64 xbh
= cpu_vsrh(xB(ctx
->opcode
));
1453 TCGv_i64 xbl
= cpu_vsrl(xB(ctx
->opcode
));
1455 TCGv_i64 t0
, zr
, nan
, exp
;
1457 if (unlikely(!ctx
->vsx_enabled
)) {
1458 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
1461 exp
= tcg_temp_new_i64();
1462 t0
= tcg_temp_new_i64();
1463 zr
= tcg_const_i64(0);
1464 nan
= tcg_const_i64(2047);
1466 tcg_gen_shri_i64(exp
, xbh
, 52);
1467 tcg_gen_andi_i64(exp
, exp
, 0x7FF);
1468 tcg_gen_movi_i64(t0
, 0x0010000000000000);
1469 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, zr
, zr
, t0
);
1470 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, nan
, zr
, t0
);
1471 tcg_gen_andi_i64(xth
, xbh
, 0x000FFFFFFFFFFFFF);
1472 tcg_gen_or_i64(xth
, xth
, t0
);
1474 tcg_gen_shri_i64(exp
, xbl
, 52);
1475 tcg_gen_andi_i64(exp
, exp
, 0x7FF);
1476 tcg_gen_movi_i64(t0
, 0x0010000000000000);
1477 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, zr
, zr
, t0
);
1478 tcg_gen_movcond_i64(TCG_COND_EQ
, t0
, exp
, nan
, zr
, t0
);
1479 tcg_gen_andi_i64(xtl
, xbl
, 0x000FFFFFFFFFFFFF);
1480 tcg_gen_or_i64(xtl
, xtl
, t0
);
1482 tcg_temp_free_i64(t0
);
1483 tcg_temp_free_i64(exp
);
1484 tcg_temp_free_i64(zr
);
1485 tcg_temp_free_i64(nan
);
1491 #undef GEN_XX3_RC_FORM
1492 #undef GEN_XX3FORM_DM