/*** VSX extension ***/
3 static inline TCGv_i64
cpu_vsrh(int n
)
12 static inline TCGv_i64
cpu_vsrl(int n
)
17 return cpu_avrl
[n
-32];
/*
 * Emit a scalar VSX load: compute the EA from the opcode's RA/RB fields and
 * load into the high doubleword of VSR xT using gen_qemu_<operation>.
 * The low doubleword is deliberately left untouched (architecturally
 * undefined for these loads).
 * NOTE(review): the EA declaration, return-on-exception, and temp free were
 * lost in extraction; restored from upstream QEMU.
 */
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}
37 VSX_LOAD_SCALAR(lxsdx
, ld64_i64
)
38 VSX_LOAD_SCALAR(lxsiwax
, ld32s_i64
)
39 VSX_LOAD_SCALAR(lxsibzx
, ld8u_i64
)
40 VSX_LOAD_SCALAR(lxsihzx
, ld16u_i64
)
41 VSX_LOAD_SCALAR(lxsiwzx
, ld32u_i64
)
42 VSX_LOAD_SCALAR(lxsspx
, ld32fs
)
44 static void gen_lxvd2x(DisasContext
*ctx
)
47 if (unlikely(!ctx
->vsx_enabled
)) {
48 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
51 gen_set_access_type(ctx
, ACCESS_INT
);
53 gen_addr_reg_index(ctx
, EA
);
54 gen_qemu_ld64_i64(ctx
, cpu_vsrh(xT(ctx
->opcode
)), EA
);
55 tcg_gen_addi_tl(EA
, EA
, 8);
56 gen_qemu_ld64_i64(ctx
, cpu_vsrl(xT(ctx
->opcode
)), EA
);
60 static void gen_lxvdsx(DisasContext
*ctx
)
63 if (unlikely(!ctx
->vsx_enabled
)) {
64 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
67 gen_set_access_type(ctx
, ACCESS_INT
);
69 gen_addr_reg_index(ctx
, EA
);
70 gen_qemu_ld64_i64(ctx
, cpu_vsrh(xT(ctx
->opcode
)), EA
);
71 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xT(ctx
->opcode
)));
75 static void gen_lxvw4x(DisasContext
*ctx
)
78 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
79 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
80 if (unlikely(!ctx
->vsx_enabled
)) {
81 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
84 gen_set_access_type(ctx
, ACCESS_INT
);
87 gen_addr_reg_index(ctx
, EA
);
89 TCGv_i64 t0
= tcg_temp_new_i64();
90 TCGv_i64 t1
= tcg_temp_new_i64();
92 tcg_gen_qemu_ld_i64(t0
, EA
, ctx
->mem_idx
, MO_LEQ
);
93 tcg_gen_shri_i64(t1
, t0
, 32);
94 tcg_gen_deposit_i64(xth
, t1
, t0
, 32, 32);
95 tcg_gen_addi_tl(EA
, EA
, 8);
96 tcg_gen_qemu_ld_i64(t0
, EA
, ctx
->mem_idx
, MO_LEQ
);
97 tcg_gen_shri_i64(t1
, t0
, 32);
98 tcg_gen_deposit_i64(xtl
, t1
, t0
, 32, 32);
99 tcg_temp_free_i64(t0
);
100 tcg_temp_free_i64(t1
);
102 tcg_gen_qemu_ld_i64(xth
, EA
, ctx
->mem_idx
, MO_BEQ
);
103 tcg_gen_addi_tl(EA
, EA
, 8);
104 tcg_gen_qemu_ld_i64(xtl
, EA
, ctx
->mem_idx
, MO_BEQ
);
/*
 * Emit a scalar VSX store: store the high doubleword of VSR xS to the EA
 * computed from the opcode's RA/RB fields, using gen_qemu_<operation>.
 * NOTE(review): EA declaration, return-on-exception, and temp free restored
 * from upstream QEMU.
 */
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}
124 VSX_STORE_SCALAR(stxsdx
, st64_i64
)
126 VSX_STORE_SCALAR(stxsibx
, st8_i64
)
127 VSX_STORE_SCALAR(stxsihx
, st16_i64
)
128 VSX_STORE_SCALAR(stxsiwx
, st32_i64
)
129 VSX_STORE_SCALAR(stxsspx
, st32fs
)
131 static void gen_stxvd2x(DisasContext
*ctx
)
134 if (unlikely(!ctx
->vsx_enabled
)) {
135 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
138 gen_set_access_type(ctx
, ACCESS_INT
);
140 gen_addr_reg_index(ctx
, EA
);
141 gen_qemu_st64_i64(ctx
, cpu_vsrh(xS(ctx
->opcode
)), EA
);
142 tcg_gen_addi_tl(EA
, EA
, 8);
143 gen_qemu_st64_i64(ctx
, cpu_vsrl(xS(ctx
->opcode
)), EA
);
147 static void gen_stxvw4x(DisasContext
*ctx
)
149 TCGv_i64 xsh
= cpu_vsrh(xS(ctx
->opcode
));
150 TCGv_i64 xsl
= cpu_vsrl(xS(ctx
->opcode
));
152 if (unlikely(!ctx
->vsx_enabled
)) {
153 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
156 gen_set_access_type(ctx
, ACCESS_INT
);
158 gen_addr_reg_index(ctx
, EA
);
160 TCGv_i64 t0
= tcg_temp_new_i64();
161 TCGv_i64 t1
= tcg_temp_new_i64();
163 tcg_gen_shri_i64(t0
, xsh
, 32);
164 tcg_gen_deposit_i64(t1
, t0
, xsh
, 32, 32);
165 tcg_gen_qemu_st_i64(t1
, EA
, ctx
->mem_idx
, MO_LEQ
);
166 tcg_gen_addi_tl(EA
, EA
, 8);
167 tcg_gen_shri_i64(t0
, xsl
, 32);
168 tcg_gen_deposit_i64(t1
, t0
, xsl
, 32, 32);
169 tcg_gen_qemu_st_i64(t1
, EA
, ctx
->mem_idx
, MO_LEQ
);
170 tcg_temp_free_i64(t0
);
171 tcg_temp_free_i64(t1
);
173 tcg_gen_qemu_st_i64(xsh
, EA
, ctx
->mem_idx
, MO_BEQ
);
174 tcg_gen_addi_tl(EA
, EA
, 8);
175 tcg_gen_qemu_st_i64(xsl
, EA
, ctx
->mem_idx
, MO_BEQ
);
/*
 * Move a word between a GPR and a VSR with the given extensions.
 * Low VSRs (xS < 32) live in the FP register file, so FP must be enabled;
 * high VSRs live in the Altivec file, so Altivec must be enabled.
 * NOTE(review): the else branch and the return statements were lost in
 * extraction; restored from upstream QEMU.
 */
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}
201 MV_VSRW(mfvsrwz
, ext32u_i64
, trunc_i64_tl
, cpu_gpr
[rA(ctx
->opcode
)], \
202 cpu_vsrh(xS(ctx
->opcode
)))
203 MV_VSRW(mtvsrwa
, extu_tl_i64
, ext32s_i64
, cpu_vsrh(xT(ctx
->opcode
)), \
204 cpu_gpr
[rA(ctx
->opcode
)])
205 MV_VSRW(mtvsrwz
, extu_tl_i64
, ext32u_i64
, cpu_vsrh(xT(ctx
->opcode
)), \
206 cpu_gpr
[rA(ctx
->opcode
)])
208 #if defined(TARGET_PPC64)
209 #define MV_VSRD(name, target, source) \
210 static void gen_##name(DisasContext *ctx) \
212 if (xS(ctx->opcode) < 32) { \
213 if (unlikely(!ctx->fpu_enabled)) { \
214 gen_exception(ctx, POWERPC_EXCP_FPU); \
218 if (unlikely(!ctx->altivec_enabled)) { \
219 gen_exception(ctx, POWERPC_EXCP_VPU); \
223 tcg_gen_mov_i64(target, source); \
226 MV_VSRD(mfvsrd
, cpu_gpr
[rA(ctx
->opcode
)], cpu_vsrh(xS(ctx
->opcode
)))
227 MV_VSRD(mtvsrd
, cpu_vsrh(xT(ctx
->opcode
)), cpu_gpr
[rA(ctx
->opcode
)])
229 static void gen_mfvsrld(DisasContext
*ctx
)
231 if (xS(ctx
->opcode
) < 32) {
232 if (unlikely(!ctx
->vsx_enabled
)) {
233 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
237 if (unlikely(!ctx
->altivec_enabled
)) {
238 gen_exception(ctx
, POWERPC_EXCP_VPU
);
243 tcg_gen_mov_i64(cpu_gpr
[rA(ctx
->opcode
)], cpu_vsrl(xS(ctx
->opcode
)));
246 static void gen_mtvsrdd(DisasContext
*ctx
)
248 if (xT(ctx
->opcode
) < 32) {
249 if (unlikely(!ctx
->vsx_enabled
)) {
250 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
254 if (unlikely(!ctx
->altivec_enabled
)) {
255 gen_exception(ctx
, POWERPC_EXCP_VPU
);
260 if (!rA(ctx
->opcode
)) {
261 tcg_gen_movi_i64(cpu_vsrh(xT(ctx
->opcode
)), 0);
263 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_gpr
[rA(ctx
->opcode
)]);
266 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_gpr
[rB(ctx
->opcode
)]);
271 static void gen_xxpermdi(DisasContext
*ctx
)
273 if (unlikely(!ctx
->vsx_enabled
)) {
274 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
278 if (unlikely((xT(ctx
->opcode
) == xA(ctx
->opcode
)) ||
279 (xT(ctx
->opcode
) == xB(ctx
->opcode
)))) {
282 xh
= tcg_temp_new_i64();
283 xl
= tcg_temp_new_i64();
285 if ((DM(ctx
->opcode
) & 2) == 0) {
286 tcg_gen_mov_i64(xh
, cpu_vsrh(xA(ctx
->opcode
)));
288 tcg_gen_mov_i64(xh
, cpu_vsrl(xA(ctx
->opcode
)));
290 if ((DM(ctx
->opcode
) & 1) == 0) {
291 tcg_gen_mov_i64(xl
, cpu_vsrh(xB(ctx
->opcode
)));
293 tcg_gen_mov_i64(xl
, cpu_vsrl(xB(ctx
->opcode
)));
296 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), xh
);
297 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), xl
);
299 tcg_temp_free_i64(xh
);
300 tcg_temp_free_i64(xl
);
302 if ((DM(ctx
->opcode
) & 2) == 0) {
303 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_vsrh(xA(ctx
->opcode
)));
305 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), cpu_vsrl(xA(ctx
->opcode
)));
307 if ((DM(ctx
->opcode
) & 1) == 0) {
308 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xB(ctx
->opcode
)));
310 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrl(xB(ctx
->opcode
)));
/* Sign-bit masks: one DP sign bit, or a sign bit in each SP word. */
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP  0x8000000080000000ull

/*
 * Scalar sign-manipulation ops on the high dword of xB into xT:
 * OP_ABS clears the sign, OP_NABS sets it, OP_NEG flips it, OP_CPSGN
 * copies the sign from xA.
 * NOTE(review): the switch/case framing and declarations were lost in
 * extraction; restored from upstream QEMU.
 */
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }
362 VSX_SCALAR_MOVE(xsabsdp
, OP_ABS
, SGN_MASK_DP
)
363 VSX_SCALAR_MOVE(xsnabsdp
, OP_NABS
, SGN_MASK_DP
)
364 VSX_SCALAR_MOVE(xsnegdp
, OP_NEG
, SGN_MASK_DP
)
365 VSX_SCALAR_MOVE(xscpsgndp
, OP_CPSGN
, SGN_MASK_DP
)
/*
 * Vector sign-manipulation ops applied to both dwords of xB into xT;
 * same op semantics as VSX_SCALAR_MOVE but on the full 128-bit register.
 * NOTE(review): the switch/case framing was lost in extraction; restored
 * from upstream QEMU.
 */
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }
420 VSX_VECTOR_MOVE(xvabsdp
, OP_ABS
, SGN_MASK_DP
)
421 VSX_VECTOR_MOVE(xvnabsdp
, OP_NABS
, SGN_MASK_DP
)
422 VSX_VECTOR_MOVE(xvnegdp
, OP_NEG
, SGN_MASK_DP
)
423 VSX_VECTOR_MOVE(xvcpsgndp
, OP_CPSGN
, SGN_MASK_DP
)
424 VSX_VECTOR_MOVE(xvabssp
, OP_ABS
, SGN_MASK_SP
)
425 VSX_VECTOR_MOVE(xvnabssp
, OP_NABS
, SGN_MASK_SP
)
426 VSX_VECTOR_MOVE(xvnegsp
, OP_NEG
, SGN_MASK_SP
)
427 VSX_VECTOR_MOVE(xvcpsgnsp
, OP_CPSGN
, SGN_MASK_SP
)
/*
 * Generate a call to helper_<name>(env, opcode) — the helper decodes the
 * register fields itself.  op1/op2/inval/type are decode-table metadata and
 * unused in the body.
 * NOTE(review): the opc declaration, braces, and return were lost in
 * extraction; restored from upstream QEMU.
 */
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}
/*
 * Generate a call to helper_<name>(xT_high, env, xB_high) for helpers that
 * take the dwords directly rather than decoding the opcode.
 * NOTE(review): braces and return restored from upstream QEMU.
 */
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)                 \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,                     \
                      cpu_vsrh(xB(ctx->opcode)));                             \
}
453 GEN_VSX_HELPER_2(xsadddp
, 0x00, 0x04, 0, PPC2_VSX
)
454 GEN_VSX_HELPER_2(xssubdp
, 0x00, 0x05, 0, PPC2_VSX
)
455 GEN_VSX_HELPER_2(xsmuldp
, 0x00, 0x06, 0, PPC2_VSX
)
456 GEN_VSX_HELPER_2(xsdivdp
, 0x00, 0x07, 0, PPC2_VSX
)
457 GEN_VSX_HELPER_2(xsredp
, 0x14, 0x05, 0, PPC2_VSX
)
458 GEN_VSX_HELPER_2(xssqrtdp
, 0x16, 0x04, 0, PPC2_VSX
)
459 GEN_VSX_HELPER_2(xsrsqrtedp
, 0x14, 0x04, 0, PPC2_VSX
)
460 GEN_VSX_HELPER_2(xstdivdp
, 0x14, 0x07, 0, PPC2_VSX
)
461 GEN_VSX_HELPER_2(xstsqrtdp
, 0x14, 0x06, 0, PPC2_VSX
)
462 GEN_VSX_HELPER_2(xsmaddadp
, 0x04, 0x04, 0, PPC2_VSX
)
463 GEN_VSX_HELPER_2(xsmaddmdp
, 0x04, 0x05, 0, PPC2_VSX
)
464 GEN_VSX_HELPER_2(xsmsubadp
, 0x04, 0x06, 0, PPC2_VSX
)
465 GEN_VSX_HELPER_2(xsmsubmdp
, 0x04, 0x07, 0, PPC2_VSX
)
466 GEN_VSX_HELPER_2(xsnmaddadp
, 0x04, 0x14, 0, PPC2_VSX
)
467 GEN_VSX_HELPER_2(xsnmaddmdp
, 0x04, 0x15, 0, PPC2_VSX
)
468 GEN_VSX_HELPER_2(xsnmsubadp
, 0x04, 0x16, 0, PPC2_VSX
)
469 GEN_VSX_HELPER_2(xsnmsubmdp
, 0x04, 0x17, 0, PPC2_VSX
)
470 GEN_VSX_HELPER_2(xscmpodp
, 0x0C, 0x05, 0, PPC2_VSX
)
471 GEN_VSX_HELPER_2(xscmpudp
, 0x0C, 0x04, 0, PPC2_VSX
)
472 GEN_VSX_HELPER_2(xsmaxdp
, 0x00, 0x14, 0, PPC2_VSX
)
473 GEN_VSX_HELPER_2(xsmindp
, 0x00, 0x15, 0, PPC2_VSX
)
474 GEN_VSX_HELPER_2(xscvdpsp
, 0x12, 0x10, 0, PPC2_VSX
)
475 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn
, 0x16, 0x10, 0, PPC2_VSX207
)
476 GEN_VSX_HELPER_2(xscvspdp
, 0x12, 0x14, 0, PPC2_VSX
)
477 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn
, 0x16, 0x14, 0, PPC2_VSX207
)
478 GEN_VSX_HELPER_2(xscvdpsxds
, 0x10, 0x15, 0, PPC2_VSX
)
479 GEN_VSX_HELPER_2(xscvdpsxws
, 0x10, 0x05, 0, PPC2_VSX
)
480 GEN_VSX_HELPER_2(xscvdpuxds
, 0x10, 0x14, 0, PPC2_VSX
)
481 GEN_VSX_HELPER_2(xscvdpuxws
, 0x10, 0x04, 0, PPC2_VSX
)
482 GEN_VSX_HELPER_2(xscvsxddp
, 0x10, 0x17, 0, PPC2_VSX
)
483 GEN_VSX_HELPER_2(xscvuxddp
, 0x10, 0x16, 0, PPC2_VSX
)
484 GEN_VSX_HELPER_2(xsrdpi
, 0x12, 0x04, 0, PPC2_VSX
)
485 GEN_VSX_HELPER_2(xsrdpic
, 0x16, 0x06, 0, PPC2_VSX
)
486 GEN_VSX_HELPER_2(xsrdpim
, 0x12, 0x07, 0, PPC2_VSX
)
487 GEN_VSX_HELPER_2(xsrdpip
, 0x12, 0x06, 0, PPC2_VSX
)
488 GEN_VSX_HELPER_2(xsrdpiz
, 0x12, 0x05, 0, PPC2_VSX
)
489 GEN_VSX_HELPER_XT_XB_ENV(xsrsp
, 0x12, 0x11, 0, PPC2_VSX207
)
491 GEN_VSX_HELPER_2(xsaddsp
, 0x00, 0x00, 0, PPC2_VSX207
)
492 GEN_VSX_HELPER_2(xssubsp
, 0x00, 0x01, 0, PPC2_VSX207
)
493 GEN_VSX_HELPER_2(xsmulsp
, 0x00, 0x02, 0, PPC2_VSX207
)
494 GEN_VSX_HELPER_2(xsdivsp
, 0x00, 0x03, 0, PPC2_VSX207
)
495 GEN_VSX_HELPER_2(xsresp
, 0x14, 0x01, 0, PPC2_VSX207
)
496 GEN_VSX_HELPER_2(xssqrtsp
, 0x16, 0x00, 0, PPC2_VSX207
)
497 GEN_VSX_HELPER_2(xsrsqrtesp
, 0x14, 0x00, 0, PPC2_VSX207
)
498 GEN_VSX_HELPER_2(xsmaddasp
, 0x04, 0x00, 0, PPC2_VSX207
)
499 GEN_VSX_HELPER_2(xsmaddmsp
, 0x04, 0x01, 0, PPC2_VSX207
)
500 GEN_VSX_HELPER_2(xsmsubasp
, 0x04, 0x02, 0, PPC2_VSX207
)
501 GEN_VSX_HELPER_2(xsmsubmsp
, 0x04, 0x03, 0, PPC2_VSX207
)
502 GEN_VSX_HELPER_2(xsnmaddasp
, 0x04, 0x10, 0, PPC2_VSX207
)
503 GEN_VSX_HELPER_2(xsnmaddmsp
, 0x04, 0x11, 0, PPC2_VSX207
)
504 GEN_VSX_HELPER_2(xsnmsubasp
, 0x04, 0x12, 0, PPC2_VSX207
)
505 GEN_VSX_HELPER_2(xsnmsubmsp
, 0x04, 0x13, 0, PPC2_VSX207
)
506 GEN_VSX_HELPER_2(xscvsxdsp
, 0x10, 0x13, 0, PPC2_VSX207
)
507 GEN_VSX_HELPER_2(xscvuxdsp
, 0x10, 0x12, 0, PPC2_VSX207
)
509 GEN_VSX_HELPER_2(xvadddp
, 0x00, 0x0C, 0, PPC2_VSX
)
510 GEN_VSX_HELPER_2(xvsubdp
, 0x00, 0x0D, 0, PPC2_VSX
)
511 GEN_VSX_HELPER_2(xvmuldp
, 0x00, 0x0E, 0, PPC2_VSX
)
512 GEN_VSX_HELPER_2(xvdivdp
, 0x00, 0x0F, 0, PPC2_VSX
)
513 GEN_VSX_HELPER_2(xvredp
, 0x14, 0x0D, 0, PPC2_VSX
)
514 GEN_VSX_HELPER_2(xvsqrtdp
, 0x16, 0x0C, 0, PPC2_VSX
)
515 GEN_VSX_HELPER_2(xvrsqrtedp
, 0x14, 0x0C, 0, PPC2_VSX
)
516 GEN_VSX_HELPER_2(xvtdivdp
, 0x14, 0x0F, 0, PPC2_VSX
)
517 GEN_VSX_HELPER_2(xvtsqrtdp
, 0x14, 0x0E, 0, PPC2_VSX
)
518 GEN_VSX_HELPER_2(xvmaddadp
, 0x04, 0x0C, 0, PPC2_VSX
)
519 GEN_VSX_HELPER_2(xvmaddmdp
, 0x04, 0x0D, 0, PPC2_VSX
)
520 GEN_VSX_HELPER_2(xvmsubadp
, 0x04, 0x0E, 0, PPC2_VSX
)
521 GEN_VSX_HELPER_2(xvmsubmdp
, 0x04, 0x0F, 0, PPC2_VSX
)
522 GEN_VSX_HELPER_2(xvnmaddadp
, 0x04, 0x1C, 0, PPC2_VSX
)
523 GEN_VSX_HELPER_2(xvnmaddmdp
, 0x04, 0x1D, 0, PPC2_VSX
)
524 GEN_VSX_HELPER_2(xvnmsubadp
, 0x04, 0x1E, 0, PPC2_VSX
)
525 GEN_VSX_HELPER_2(xvnmsubmdp
, 0x04, 0x1F, 0, PPC2_VSX
)
526 GEN_VSX_HELPER_2(xvmaxdp
, 0x00, 0x1C, 0, PPC2_VSX
)
527 GEN_VSX_HELPER_2(xvmindp
, 0x00, 0x1D, 0, PPC2_VSX
)
528 GEN_VSX_HELPER_2(xvcmpeqdp
, 0x0C, 0x0C, 0, PPC2_VSX
)
529 GEN_VSX_HELPER_2(xvcmpgtdp
, 0x0C, 0x0D, 0, PPC2_VSX
)
530 GEN_VSX_HELPER_2(xvcmpgedp
, 0x0C, 0x0E, 0, PPC2_VSX
)
531 GEN_VSX_HELPER_2(xvcvdpsp
, 0x12, 0x18, 0, PPC2_VSX
)
532 GEN_VSX_HELPER_2(xvcvdpsxds
, 0x10, 0x1D, 0, PPC2_VSX
)
533 GEN_VSX_HELPER_2(xvcvdpsxws
, 0x10, 0x0D, 0, PPC2_VSX
)
534 GEN_VSX_HELPER_2(xvcvdpuxds
, 0x10, 0x1C, 0, PPC2_VSX
)
535 GEN_VSX_HELPER_2(xvcvdpuxws
, 0x10, 0x0C, 0, PPC2_VSX
)
536 GEN_VSX_HELPER_2(xvcvsxddp
, 0x10, 0x1F, 0, PPC2_VSX
)
537 GEN_VSX_HELPER_2(xvcvuxddp
, 0x10, 0x1E, 0, PPC2_VSX
)
538 GEN_VSX_HELPER_2(xvcvsxwdp
, 0x10, 0x0F, 0, PPC2_VSX
)
539 GEN_VSX_HELPER_2(xvcvuxwdp
, 0x10, 0x0E, 0, PPC2_VSX
)
540 GEN_VSX_HELPER_2(xvrdpi
, 0x12, 0x0C, 0, PPC2_VSX
)
541 GEN_VSX_HELPER_2(xvrdpic
, 0x16, 0x0E, 0, PPC2_VSX
)
542 GEN_VSX_HELPER_2(xvrdpim
, 0x12, 0x0F, 0, PPC2_VSX
)
543 GEN_VSX_HELPER_2(xvrdpip
, 0x12, 0x0E, 0, PPC2_VSX
)
544 GEN_VSX_HELPER_2(xvrdpiz
, 0x12, 0x0D, 0, PPC2_VSX
)
546 GEN_VSX_HELPER_2(xvaddsp
, 0x00, 0x08, 0, PPC2_VSX
)
547 GEN_VSX_HELPER_2(xvsubsp
, 0x00, 0x09, 0, PPC2_VSX
)
548 GEN_VSX_HELPER_2(xvmulsp
, 0x00, 0x0A, 0, PPC2_VSX
)
549 GEN_VSX_HELPER_2(xvdivsp
, 0x00, 0x0B, 0, PPC2_VSX
)
550 GEN_VSX_HELPER_2(xvresp
, 0x14, 0x09, 0, PPC2_VSX
)
551 GEN_VSX_HELPER_2(xvsqrtsp
, 0x16, 0x08, 0, PPC2_VSX
)
552 GEN_VSX_HELPER_2(xvrsqrtesp
, 0x14, 0x08, 0, PPC2_VSX
)
553 GEN_VSX_HELPER_2(xvtdivsp
, 0x14, 0x0B, 0, PPC2_VSX
)
554 GEN_VSX_HELPER_2(xvtsqrtsp
, 0x14, 0x0A, 0, PPC2_VSX
)
555 GEN_VSX_HELPER_2(xvmaddasp
, 0x04, 0x08, 0, PPC2_VSX
)
556 GEN_VSX_HELPER_2(xvmaddmsp
, 0x04, 0x09, 0, PPC2_VSX
)
557 GEN_VSX_HELPER_2(xvmsubasp
, 0x04, 0x0A, 0, PPC2_VSX
)
558 GEN_VSX_HELPER_2(xvmsubmsp
, 0x04, 0x0B, 0, PPC2_VSX
)
559 GEN_VSX_HELPER_2(xvnmaddasp
, 0x04, 0x18, 0, PPC2_VSX
)
560 GEN_VSX_HELPER_2(xvnmaddmsp
, 0x04, 0x19, 0, PPC2_VSX
)
561 GEN_VSX_HELPER_2(xvnmsubasp
, 0x04, 0x1A, 0, PPC2_VSX
)
562 GEN_VSX_HELPER_2(xvnmsubmsp
, 0x04, 0x1B, 0, PPC2_VSX
)
563 GEN_VSX_HELPER_2(xvmaxsp
, 0x00, 0x18, 0, PPC2_VSX
)
564 GEN_VSX_HELPER_2(xvminsp
, 0x00, 0x19, 0, PPC2_VSX
)
565 GEN_VSX_HELPER_2(xvcmpeqsp
, 0x0C, 0x08, 0, PPC2_VSX
)
566 GEN_VSX_HELPER_2(xvcmpgtsp
, 0x0C, 0x09, 0, PPC2_VSX
)
567 GEN_VSX_HELPER_2(xvcmpgesp
, 0x0C, 0x0A, 0, PPC2_VSX
)
568 GEN_VSX_HELPER_2(xvcvspdp
, 0x12, 0x1C, 0, PPC2_VSX
)
569 GEN_VSX_HELPER_2(xvcvspsxds
, 0x10, 0x19, 0, PPC2_VSX
)
570 GEN_VSX_HELPER_2(xvcvspsxws
, 0x10, 0x09, 0, PPC2_VSX
)
571 GEN_VSX_HELPER_2(xvcvspuxds
, 0x10, 0x18, 0, PPC2_VSX
)
572 GEN_VSX_HELPER_2(xvcvspuxws
, 0x10, 0x08, 0, PPC2_VSX
)
573 GEN_VSX_HELPER_2(xvcvsxdsp
, 0x10, 0x1B, 0, PPC2_VSX
)
574 GEN_VSX_HELPER_2(xvcvuxdsp
, 0x10, 0x1A, 0, PPC2_VSX
)
575 GEN_VSX_HELPER_2(xvcvsxwsp
, 0x10, 0x0B, 0, PPC2_VSX
)
576 GEN_VSX_HELPER_2(xvcvuxwsp
, 0x10, 0x0A, 0, PPC2_VSX
)
577 GEN_VSX_HELPER_2(xvrspi
, 0x12, 0x08, 0, PPC2_VSX
)
578 GEN_VSX_HELPER_2(xvrspic
, 0x16, 0x0A, 0, PPC2_VSX
)
579 GEN_VSX_HELPER_2(xvrspim
, 0x12, 0x0B, 0, PPC2_VSX
)
580 GEN_VSX_HELPER_2(xvrspip
, 0x12, 0x0A, 0, PPC2_VSX
)
581 GEN_VSX_HELPER_2(xvrspiz
, 0x12, 0x09, 0, PPC2_VSX
)
/*
 * 128-bit bitwise op on VSRs: apply tcg_op independently to the high and
 * low doublewords of xA/xB into xT.
 * NOTE(review): braces and return restored from upstream QEMU.
 */
#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
               cpu_vsrh(xB(ctx->opcode)));                           \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
               cpu_vsrl(xB(ctx->opcode)));                           \
    }
596 VSX_LOGICAL(xxland
, tcg_gen_and_i64
)
597 VSX_LOGICAL(xxlandc
, tcg_gen_andc_i64
)
598 VSX_LOGICAL(xxlor
, tcg_gen_or_i64
)
599 VSX_LOGICAL(xxlxor
, tcg_gen_xor_i64
)
600 VSX_LOGICAL(xxlnor
, tcg_gen_nor_i64
)
601 VSX_LOGICAL(xxleqv
, tcg_gen_eqv_i64
)
602 VSX_LOGICAL(xxlnand
, tcg_gen_nand_i64
)
603 VSX_LOGICAL(xxlorc
, tcg_gen_orc_i64
)
/*
 * xxmrghw/xxmrglw: interleave the words of one half (high when `high`,
 * else low) of xA and xB into xT.  Both source halves are copied first so
 * the result is correct even when xT aliases a source.
 * NOTE(review): the if (high)/else framing and deposit arguments were lost
 * in extraction; restored from upstream QEMU.
 */
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }
640 VSX_XXMRG(xxmrghw
, 1)
641 VSX_XXMRG(xxmrglw
, 0)
643 static void gen_xxsel(DisasContext
* ctx
)
646 if (unlikely(!ctx
->vsx_enabled
)) {
647 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
650 a
= tcg_temp_new_i64();
651 b
= tcg_temp_new_i64();
652 c
= tcg_temp_new_i64();
654 tcg_gen_mov_i64(a
, cpu_vsrh(xA(ctx
->opcode
)));
655 tcg_gen_mov_i64(b
, cpu_vsrh(xB(ctx
->opcode
)));
656 tcg_gen_mov_i64(c
, cpu_vsrh(xC(ctx
->opcode
)));
658 tcg_gen_and_i64(b
, b
, c
);
659 tcg_gen_andc_i64(a
, a
, c
);
660 tcg_gen_or_i64(cpu_vsrh(xT(ctx
->opcode
)), a
, b
);
662 tcg_gen_mov_i64(a
, cpu_vsrl(xA(ctx
->opcode
)));
663 tcg_gen_mov_i64(b
, cpu_vsrl(xB(ctx
->opcode
)));
664 tcg_gen_mov_i64(c
, cpu_vsrl(xC(ctx
->opcode
)));
666 tcg_gen_and_i64(b
, b
, c
);
667 tcg_gen_andc_i64(a
, a
, c
);
668 tcg_gen_or_i64(cpu_vsrl(xT(ctx
->opcode
)), a
, b
);
670 tcg_temp_free_i64(a
);
671 tcg_temp_free_i64(b
);
672 tcg_temp_free_i64(c
);
675 static void gen_xxspltw(DisasContext
*ctx
)
678 TCGv_i64 vsr
= (UIM(ctx
->opcode
) & 2) ?
679 cpu_vsrl(xB(ctx
->opcode
)) :
680 cpu_vsrh(xB(ctx
->opcode
));
682 if (unlikely(!ctx
->vsx_enabled
)) {
683 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
687 b
= tcg_temp_new_i64();
688 b2
= tcg_temp_new_i64();
690 if (UIM(ctx
->opcode
) & 1) {
691 tcg_gen_ext32u_i64(b
, vsr
);
693 tcg_gen_shri_i64(b
, vsr
, 32);
696 tcg_gen_shli_i64(b2
, b
, 32);
697 tcg_gen_or_i64(cpu_vsrh(xT(ctx
->opcode
)), b
, b2
);
698 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xT(ctx
->opcode
)));
700 tcg_temp_free_i64(b
);
701 tcg_temp_free_i64(b2
);
704 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
706 static void gen_xxspltib(DisasContext
*ctx
)
708 unsigned char uim8
= IMM8(ctx
->opcode
);
709 if (xS(ctx
->opcode
) < 32) {
710 if (unlikely(!ctx
->altivec_enabled
)) {
711 gen_exception(ctx
, POWERPC_EXCP_VPU
);
715 if (unlikely(!ctx
->vsx_enabled
)) {
716 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
720 tcg_gen_movi_i64(cpu_vsrh(xT(ctx
->opcode
)), pattern(uim8
));
721 tcg_gen_movi_i64(cpu_vsrl(xT(ctx
->opcode
)), pattern(uim8
));
724 static void gen_xxsldwi(DisasContext
*ctx
)
727 if (unlikely(!ctx
->vsx_enabled
)) {
728 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
731 xth
= tcg_temp_new_i64();
732 xtl
= tcg_temp_new_i64();
734 switch (SHW(ctx
->opcode
)) {
736 tcg_gen_mov_i64(xth
, cpu_vsrh(xA(ctx
->opcode
)));
737 tcg_gen_mov_i64(xtl
, cpu_vsrl(xA(ctx
->opcode
)));
741 TCGv_i64 t0
= tcg_temp_new_i64();
742 tcg_gen_mov_i64(xth
, cpu_vsrh(xA(ctx
->opcode
)));
743 tcg_gen_shli_i64(xth
, xth
, 32);
744 tcg_gen_mov_i64(t0
, cpu_vsrl(xA(ctx
->opcode
)));
745 tcg_gen_shri_i64(t0
, t0
, 32);
746 tcg_gen_or_i64(xth
, xth
, t0
);
747 tcg_gen_mov_i64(xtl
, cpu_vsrl(xA(ctx
->opcode
)));
748 tcg_gen_shli_i64(xtl
, xtl
, 32);
749 tcg_gen_mov_i64(t0
, cpu_vsrh(xB(ctx
->opcode
)));
750 tcg_gen_shri_i64(t0
, t0
, 32);
751 tcg_gen_or_i64(xtl
, xtl
, t0
);
752 tcg_temp_free_i64(t0
);
756 tcg_gen_mov_i64(xth
, cpu_vsrl(xA(ctx
->opcode
)));
757 tcg_gen_mov_i64(xtl
, cpu_vsrh(xB(ctx
->opcode
)));
761 TCGv_i64 t0
= tcg_temp_new_i64();
762 tcg_gen_mov_i64(xth
, cpu_vsrl(xA(ctx
->opcode
)));
763 tcg_gen_shli_i64(xth
, xth
, 32);
764 tcg_gen_mov_i64(t0
, cpu_vsrh(xB(ctx
->opcode
)));
765 tcg_gen_shri_i64(t0
, t0
, 32);
766 tcg_gen_or_i64(xth
, xth
, t0
);
767 tcg_gen_mov_i64(xtl
, cpu_vsrh(xB(ctx
->opcode
)));
768 tcg_gen_shli_i64(xtl
, xtl
, 32);
769 tcg_gen_mov_i64(t0
, cpu_vsrl(xB(ctx
->opcode
)));
770 tcg_gen_shri_i64(t0
, t0
, 32);
771 tcg_gen_or_i64(xtl
, xtl
, t0
);
772 tcg_temp_free_i64(t0
);
777 tcg_gen_mov_i64(cpu_vsrh(xT(ctx
->opcode
)), xth
);
778 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), xtl
);
780 tcg_temp_free_i64(xth
);
781 tcg_temp_free_i64(xtl
);
/* Drop decode-table helper macros that are no longer needed below here. */
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM