/*** VSX extension ***/
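
/*
 * VSRs 0-31 share storage with the FPRs (high doubleword, cpu_fpr) and a
 * dedicated low-doubleword array (cpu_vsr); VSRs 32-63 share storage with
 * the Altivec VRs (cpu_avrh/cpu_avrl).  These accessors hide that split
 * register file.
 */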

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

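/*
 * Indexed scalar loads: "operation" selects the access width and extension
 * (e.g. ld32s_i64 sign-extends, ld32u_i64 zero-extends, and ld32fs loads a
 * single-precision value and widens it to double format).  Only the high
 * doubleword of the target VSR is written.
 */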
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

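/* lxvdsx: load one doubleword and splat it into both halves of VSR[XT]. */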
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

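/*
 * lxvw4x: load four word elements.  In little-endian mode a single MO_LEQ
 * load byte-reverses a whole doubleword, which also swaps the two words
 * inside it, so the shri/deposit pairs swap them back into place.
 */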
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

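/*
 * Byte-swap each of the eight halfwords in inh:inl using the usual
 * mask-and-shift trick on the 0x00FF00FF00FF00FF pattern.
 */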
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

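/*
 * Byte-swap each of the four words in inh:inl: bswap64 reverses the whole
 * doubleword, so only the two words then need swapping back.
 */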
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

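/*
 * lxvb16x: byte elements have no internal byte order, so the same
 * big-endian load works in both endian modes and no swap is needed.
 */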
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

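/*
 * lxv/stxv (DQ-form) and lxvx/stxvx (indexed): targets 0-31 live in the
 * VSX half of the register file and require MSR.VSX, targets 32-63 overlay
 * the VRs and require MSR.VEC.  In little-endian mode the low doubleword
 * sits at the lower address, hence the reversed access order.
 */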
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth, xtl;                                      \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
    xth = cpu_vsrh(xt);                                     \
    xtl = cpu_vsrl(xt);                                     \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

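/*
 * Load with length: ISA 3.0 puts the byte count in the high-order byte of
 * RB, so the variable-length access is done in an out-of-line helper
 * rather than expanded inline in TCG.
 */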
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA, xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                            \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free(xt);                                             \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
#endif

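/*
 * DS-form scalar loads lxsd/lxssp target VRs only, hence the rD + 32
 * register bias and the Altivec enable check.
 */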
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

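/*
 * stxvh8x mirrors lxvh8x: in little-endian mode each halfword is
 * byte-swapped first, so the big-endian store produces the correct
 * little-endian memory image.
 */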
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

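/*
 * 32-bit GPR <-> VSR word moves.  The register number picks the enable
 * check (FPRs need MSR.FP, VRs need MSR.VEC); tcgop1 brings the source
 * into a 64-bit temporary and tcgop2 converts that into the target with
 * the required extension or truncation.
 */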
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

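/*
 * xxpermdi: the two DM bits select which doubleword of XA and XB feed the
 * halves of XT.  When XT aliases a source, go through temporaries so the
 * first write cannot clobber a still-needed input.
 */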
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

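/*
 * Quad-precision sign-bit ops: only the sign bit in the high doubleword
 * changes, so SGN_MASK_DP is reused and the low doubleword is copied
 * through unchanged.  Operands are VRs, hence the +32 register bias.
 */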
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm;                                  \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb));                           \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm);                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh);                           \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl);                           \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xbh, xbl, sgm;                                   \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xbh = tcg_temp_new_i64();                                 \
        xbl = tcg_temp_new_i64();                                 \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));          \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));          \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xbh, xbh, sgm);                  \
                tcg_gen_andc_i64(xbl, xbl, sgm);                  \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xbh, xbh, sgm);                    \
                tcg_gen_or_i64(xbl, xbl, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xbh, xbh, sgm);                   \
                tcg_gen_xor_i64(xbl, xbl, sgm);                   \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xah = tcg_temp_new_i64();                \
                TCGv_i64 xal = tcg_temp_new_i64();                \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));  \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));  \
                tcg_gen_and_i64(xah, xah, sgm);                   \
                tcg_gen_and_i64(xal, xal, sgm);                   \
                tcg_gen_andc_i64(xbh, xbh, sgm);                  \
                tcg_gen_andc_i64(xbl, xbl, sgm);                  \
                tcg_gen_or_i64(xbh, xbh, xah);                    \
                tcg_gen_or_i64(xbl, xbl, xal);                    \
                tcg_temp_free_i64(xah);                           \
                tcg_temp_free_i64(xal);                           \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);          \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);          \
        tcg_temp_free_i64(xbh);                                   \
        tcg_temp_free_i64(xbl);                                   \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

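/*
 * Generic dispatch to out-of-line helpers: the raw opcode is passed so
 * the helper can re-decode its own operands; op1/op2/inval/type only
 * document the encoding and are not used by the generated body.
 */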
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext * ctx)                        \
{                                                                 \
    TCGv_i32 opc;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    gen_helper_##name(cpu_env, opc);                              \
    tcg_temp_free_i32(opc);                                       \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext * ctx)                        \
{                                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,         \
                      cpu_vsrh(xB(ctx->opcode)));                 \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

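/*
 * xxbrd/xxbrh/xxbrq/xxbrw: byte-reverse each element (doubleword,
 * halfword, quadword, word) of VSR[XB] into VSR[XT].
 */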
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
               cpu_vsrh(xB(ctx->opcode)));                           \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
               cpu_vsrl(xB(ctx->opcode)));                           \
    }

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

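/*
 * xxmrghw/xxmrglw: interleave the two high (or low) words of XA and XB;
 * the a0/a1 and b0/b1 copies keep the result correct when XT aliases a
 * source register.
 */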
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

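/* xxsel: bitwise select, XT = (XA & ~XC) | (XB & XC), over all 128 bits. */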
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

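/*
 * pattern() replicates a byte across all eight byte lanes, e.g.
 * pattern(0xab) == 0xababababababababULL; xxspltib uses it to splat the
 * 8-bit immediate across the whole VSR.
 */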
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL