1 /*** VSX extension ***/
2
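/*
 * Each VSR is 128 bits wide.  For VSR[0..31] the high doubleword aliases
 * the corresponding FPR (cpu_fpr) and the low doubleword lives in cpu_vsr;
 * VSR[32..63] alias the Altivec registers (cpu_avrh/cpu_avrl).  These
 * accessors return the TCG global backing each half.
 */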
3 static inline TCGv_i64 cpu_vsrh(int n)
4 {
5 if (n < 32) {
6 return cpu_fpr[n];
7 } else {
8 return cpu_avrh[n-32];
9 }
10 }
11
12 static inline TCGv_i64 cpu_vsrl(int n)
13 {
14 if (n < 32) {
15 return cpu_vsr[n];
16 } else {
17 return cpu_avrl[n-32];
18 }
19 }
20
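/*
 * X-form indexed scalar loads: fetch a single value into the high
 * doubleword of VSR[XT]; the low doubleword is left undefined.
 */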
21 #define VSX_LOAD_SCALAR(name, operation) \
22 static void gen_##name(DisasContext *ctx) \
23 { \
24 TCGv EA; \
25 if (unlikely(!ctx->vsx_enabled)) { \
26 gen_exception(ctx, POWERPC_EXCP_VSXU); \
27 return; \
28 } \
29 gen_set_access_type(ctx, ACCESS_INT); \
30 EA = tcg_temp_new(); \
31 gen_addr_reg_index(ctx, EA); \
32 gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
33 /* NOTE: cpu_vsrl is undefined */ \
34 tcg_temp_free(EA); \
35 }
36
37 VSX_LOAD_SCALAR(lxsdx, ld64_i64)
38 VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
39 VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
40 VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
41 VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
42 VSX_LOAD_SCALAR(lxsspx, ld32fs)
43
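/* lxvd2x: load two doublewords into VSR[XT] */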
44 static void gen_lxvd2x(DisasContext *ctx)
45 {
46 TCGv EA;
47 if (unlikely(!ctx->vsx_enabled)) {
48 gen_exception(ctx, POWERPC_EXCP_VSXU);
49 return;
50 }
51 gen_set_access_type(ctx, ACCESS_INT);
52 EA = tcg_temp_new();
53 gen_addr_reg_index(ctx, EA);
54 gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
55 tcg_gen_addi_tl(EA, EA, 8);
56 gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
57 tcg_temp_free(EA);
58 }
59
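/* lxvdsx: load one doubleword and splat it into both halves of VSR[XT] */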
60 static void gen_lxvdsx(DisasContext *ctx)
61 {
62 TCGv EA;
63 if (unlikely(!ctx->vsx_enabled)) {
64 gen_exception(ctx, POWERPC_EXCP_VSXU);
65 return;
66 }
67 gen_set_access_type(ctx, ACCESS_INT);
68 EA = tcg_temp_new();
69 gen_addr_reg_index(ctx, EA);
70 gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
71 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
72 tcg_temp_free(EA);
73 }
74
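/*
 * lxvw4x: load four words.  In little-endian mode the two words of each
 * doubleword are swapped so the register ends up in big-endian element
 * order, which is what the rest of the VSX code expects.
 */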
75 static void gen_lxvw4x(DisasContext *ctx)
76 {
77 TCGv EA;
78 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
79 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
80 if (unlikely(!ctx->vsx_enabled)) {
81 gen_exception(ctx, POWERPC_EXCP_VSXU);
82 return;
83 }
84 gen_set_access_type(ctx, ACCESS_INT);
85 EA = tcg_temp_new();
86
87 gen_addr_reg_index(ctx, EA);
88 if (ctx->le_mode) {
89 TCGv_i64 t0 = tcg_temp_new_i64();
90 TCGv_i64 t1 = tcg_temp_new_i64();
91
92 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
93 tcg_gen_shri_i64(t1, t0, 32);
94 tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
95 tcg_gen_addi_tl(EA, EA, 8);
96 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
97 tcg_gen_shri_i64(t1, t0, 32);
98 tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
99 tcg_temp_free_i64(t0);
100 tcg_temp_free_i64(t1);
101 } else {
102 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
103 tcg_gen_addi_tl(EA, EA, 8);
104 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
105 }
106 tcg_temp_free(EA);
107 }
108
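/* Byte-swap each of the eight halfwords of the 128-bit value {inh, inl}. */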
109 static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
110 TCGv_i64 inh, TCGv_i64 inl)
111 {
112 TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
113 TCGv_i64 t0 = tcg_temp_new_i64();
114 TCGv_i64 t1 = tcg_temp_new_i64();
115
116 /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
117 tcg_gen_and_i64(t0, inh, mask);
118 tcg_gen_shli_i64(t0, t0, 8);
119 tcg_gen_shri_i64(t1, inh, 8);
120 tcg_gen_and_i64(t1, t1, mask);
121 tcg_gen_or_i64(outh, t0, t1);
122
123 /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
124 tcg_gen_and_i64(t0, inl, mask);
125 tcg_gen_shli_i64(t0, t0, 8);
126 tcg_gen_shri_i64(t1, inl, 8);
127 tcg_gen_and_i64(t1, t1, mask);
128 tcg_gen_or_i64(outl, t0, t1);
129
130 tcg_temp_free_i64(t0);
131 tcg_temp_free_i64(t1);
132 tcg_temp_free_i64(mask);
133 }
134
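/* Byte-swap each of the four words of the 128-bit value {inh, inl}. */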
135 static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
136 TCGv_i64 inh, TCGv_i64 inl)
137 {
138 TCGv_i64 hi = tcg_temp_new_i64();
139 TCGv_i64 lo = tcg_temp_new_i64();
140
141 tcg_gen_bswap64_i64(hi, inh);
142 tcg_gen_bswap64_i64(lo, inl);
143 tcg_gen_shri_i64(outh, hi, 32);
144 tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
145 tcg_gen_shri_i64(outl, lo, 32);
146 tcg_gen_deposit_i64(outl, outl, lo, 32, 32);
147
148 tcg_temp_free_i64(hi);
149 tcg_temp_free_i64(lo);
150 }
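
/* lxvh8x: load eight halfwords, byte-swapping them when in little-endian mode */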
151 static void gen_lxvh8x(DisasContext *ctx)
152 {
153 TCGv EA;
154 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
155 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
156
157 if (unlikely(!ctx->vsx_enabled)) {
158 gen_exception(ctx, POWERPC_EXCP_VSXU);
159 return;
160 }
161 gen_set_access_type(ctx, ACCESS_INT);
162
163 EA = tcg_temp_new();
164 gen_addr_reg_index(ctx, EA);
165 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
166 tcg_gen_addi_tl(EA, EA, 8);
167 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
168 if (ctx->le_mode) {
169 gen_bswap16x8(xth, xtl, xth, xtl);
170 }
171 tcg_temp_free(EA);
172 }
173
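/* lxvb16x: load sixteen bytes; byte elements need no endian fixup */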
174 static void gen_lxvb16x(DisasContext *ctx)
175 {
176 TCGv EA;
177 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
178 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
179
180 if (unlikely(!ctx->vsx_enabled)) {
181 gen_exception(ctx, POWERPC_EXCP_VSXU);
182 return;
183 }
184 gen_set_access_type(ctx, ACCESS_INT);
185 EA = tcg_temp_new();
186 gen_addr_reg_index(ctx, EA);
187 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
188 tcg_gen_addi_tl(EA, EA, 8);
189 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
190 tcg_temp_free(EA);
191 }
192
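/*
 * lxv/stxv (DQ-form) and lxvx/stxvx (indexed): full 128-bit vector
 * load/store.  VSR[0..31] needs the VSX facility, VSR[32..63] Altivec.
 */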
193 #define VSX_VECTOR_LOAD_STORE(name, op, indexed) \
194 static void gen_##name(DisasContext *ctx) \
195 { \
196 int xt; \
197 TCGv EA; \
198 TCGv_i64 xth, xtl; \
199 \
200 if (indexed) { \
201 xt = xT(ctx->opcode); \
202 } else { \
203 xt = DQxT(ctx->opcode); \
204 } \
205 xth = cpu_vsrh(xt); \
206 xtl = cpu_vsrl(xt); \
207 \
208 if (xt < 32) { \
209 if (unlikely(!ctx->vsx_enabled)) { \
210 gen_exception(ctx, POWERPC_EXCP_VSXU); \
211 return; \
212 } \
213 } else { \
214 if (unlikely(!ctx->altivec_enabled)) { \
215 gen_exception(ctx, POWERPC_EXCP_VPU); \
216 return; \
217 } \
218 } \
219 gen_set_access_type(ctx, ACCESS_INT); \
220 EA = tcg_temp_new(); \
221 if (indexed) { \
222 gen_addr_reg_index(ctx, EA); \
223 } else { \
224 gen_addr_imm_index(ctx, EA, 0x0F); \
225 } \
226 if (ctx->le_mode) { \
227 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
228 tcg_gen_addi_tl(EA, EA, 8); \
229 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
230 } else { \
231 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
232 tcg_gen_addi_tl(EA, EA, 8); \
233 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
234 } \
235 tcg_temp_free(EA); \
236 }
237
238 VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
239 VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
240 VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
241 VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)
242
243 #ifdef TARGET_PPC64
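/* lxvl/lxvll/stxvl/stxvll: load/store vector with length, done in helpers */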
244 #define VSX_VECTOR_LOAD_STORE_LENGTH(name) \
245 static void gen_##name(DisasContext *ctx) \
246 { \
247 TCGv EA, xt; \
248 \
249 if (xT(ctx->opcode) < 32) { \
250 if (unlikely(!ctx->vsx_enabled)) { \
251 gen_exception(ctx, POWERPC_EXCP_VSXU); \
252 return; \
253 } \
254 } else { \
255 if (unlikely(!ctx->altivec_enabled)) { \
256 gen_exception(ctx, POWERPC_EXCP_VPU); \
257 return; \
258 } \
259 } \
260 EA = tcg_temp_new(); \
261 xt = tcg_const_tl(xT(ctx->opcode)); \
262 gen_set_access_type(ctx, ACCESS_INT); \
263 gen_addr_register(ctx, EA); \
264 gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
265 tcg_temp_free(EA); \
266 tcg_temp_free(xt); \
267 }
268
269 VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
270 VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
271 VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
272 VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
273 #endif
274
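/*
 * DS-form scalar loads (lxsd, lxssp): the target is always one of the
 * upper 32 VSRs (rD + 32), so only the Altivec facility is checked.
 */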
275 #define VSX_LOAD_SCALAR_DS(name, operation) \
276 static void gen_##name(DisasContext *ctx) \
277 { \
278 TCGv EA; \
279 TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32); \
280 \
281 if (unlikely(!ctx->altivec_enabled)) { \
282 gen_exception(ctx, POWERPC_EXCP_VPU); \
283 return; \
284 } \
285 gen_set_access_type(ctx, ACCESS_INT); \
286 EA = tcg_temp_new(); \
287 gen_addr_imm_index(ctx, EA, 0x03); \
288 gen_qemu_##operation(ctx, xth, EA); \
289 /* NOTE: cpu_vsrl is undefined */ \
290 tcg_temp_free(EA); \
291 }
292
293 VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
294 VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
295
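/* X-form indexed scalar stores: store the high doubleword of VSR[XS] */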
296 #define VSX_STORE_SCALAR(name, operation) \
297 static void gen_##name(DisasContext *ctx) \
298 { \
299 TCGv EA; \
300 if (unlikely(!ctx->vsx_enabled)) { \
301 gen_exception(ctx, POWERPC_EXCP_VSXU); \
302 return; \
303 } \
304 gen_set_access_type(ctx, ACCESS_INT); \
305 EA = tcg_temp_new(); \
306 gen_addr_reg_index(ctx, EA); \
307 gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
308 tcg_temp_free(EA); \
309 }
310
311 VSX_STORE_SCALAR(stxsdx, st64_i64)
312
313 VSX_STORE_SCALAR(stxsibx, st8_i64)
314 VSX_STORE_SCALAR(stxsihx, st16_i64)
315 VSX_STORE_SCALAR(stxsiwx, st32_i64)
316 VSX_STORE_SCALAR(stxsspx, st32fs)
317
318 static void gen_stxvd2x(DisasContext *ctx)
319 {
320 TCGv EA;
321 if (unlikely(!ctx->vsx_enabled)) {
322 gen_exception(ctx, POWERPC_EXCP_VSXU);
323 return;
324 }
325 gen_set_access_type(ctx, ACCESS_INT);
326 EA = tcg_temp_new();
327 gen_addr_reg_index(ctx, EA);
328 gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
329 tcg_gen_addi_tl(EA, EA, 8);
330 gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
331 tcg_temp_free(EA);
332 }
333
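/* stxvw4x: store four words, mirroring the lxvw4x endian handling */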
334 static void gen_stxvw4x(DisasContext *ctx)
335 {
336 TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
337 TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
338 TCGv EA;
339 if (unlikely(!ctx->vsx_enabled)) {
340 gen_exception(ctx, POWERPC_EXCP_VSXU);
341 return;
342 }
343 gen_set_access_type(ctx, ACCESS_INT);
344 EA = tcg_temp_new();
345 gen_addr_reg_index(ctx, EA);
346 if (ctx->le_mode) {
347 TCGv_i64 t0 = tcg_temp_new_i64();
348 TCGv_i64 t1 = tcg_temp_new_i64();
349
350 tcg_gen_shri_i64(t0, xsh, 32);
351 tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
352 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
353 tcg_gen_addi_tl(EA, EA, 8);
354 tcg_gen_shri_i64(t0, xsl, 32);
355 tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
356 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
357 tcg_temp_free_i64(t0);
358 tcg_temp_free_i64(t1);
359 } else {
360 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
361 tcg_gen_addi_tl(EA, EA, 8);
362 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
363 }
364 tcg_temp_free(EA);
365 }
366
367 static void gen_stxvh8x(DisasContext *ctx)
368 {
369 TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
370 TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
371 TCGv EA;
372
373 if (unlikely(!ctx->vsx_enabled)) {
374 gen_exception(ctx, POWERPC_EXCP_VSXU);
375 return;
376 }
377 gen_set_access_type(ctx, ACCESS_INT);
378 EA = tcg_temp_new();
379 gen_addr_reg_index(ctx, EA);
380 if (ctx->le_mode) {
381 TCGv_i64 outh = tcg_temp_new_i64();
382 TCGv_i64 outl = tcg_temp_new_i64();
383
384 gen_bswap16x8(outh, outl, xsh, xsl);
385 tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
386 tcg_gen_addi_tl(EA, EA, 8);
387 tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
388 tcg_temp_free_i64(outh);
389 tcg_temp_free_i64(outl);
390 } else {
391 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
392 tcg_gen_addi_tl(EA, EA, 8);
393 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
394 }
395 tcg_temp_free(EA);
396 }
397
398 static void gen_stxvb16x(DisasContext *ctx)
399 {
400 TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
401 TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
402 TCGv EA;
403
404 if (unlikely(!ctx->vsx_enabled)) {
405 gen_exception(ctx, POWERPC_EXCP_VSXU);
406 return;
407 }
408 gen_set_access_type(ctx, ACCESS_INT);
409 EA = tcg_temp_new();
410 gen_addr_reg_index(ctx, EA);
411 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
412 tcg_gen_addi_tl(EA, EA, 8);
413 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
414 tcg_temp_free(EA);
415 }
416
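/* DS-form scalar stores (stxsd, stxssp), the counterpart of VSX_LOAD_SCALAR_DS */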
417 #define VSX_STORE_SCALAR_DS(name, operation) \
418 static void gen_##name(DisasContext *ctx) \
419 { \
420 TCGv EA; \
421 TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32); \
422 \
423 if (unlikely(!ctx->altivec_enabled)) { \
424 gen_exception(ctx, POWERPC_EXCP_VPU); \
425 return; \
426 } \
427 gen_set_access_type(ctx, ACCESS_INT); \
428 EA = tcg_temp_new(); \
429 gen_addr_imm_index(ctx, EA, 0x03); \
430 gen_qemu_##operation(ctx, xth, EA); \
431 /* NOTE: cpu_vsrl is undefined */ \
432 tcg_temp_free(EA); \
433 }
434
435 VSX_STORE_SCALAR_DS(stxsd, st64_i64)
436 VSX_STORE_SCALAR_DS(stxssp, st32fs)
437
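/* Word moves between a GPR and the high doubleword of a VSR */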
438 #define MV_VSRW(name, tcgop1, tcgop2, target, source) \
439 static void gen_##name(DisasContext *ctx) \
440 { \
441 if (xS(ctx->opcode) < 32) { \
442 if (unlikely(!ctx->fpu_enabled)) { \
443 gen_exception(ctx, POWERPC_EXCP_FPU); \
444 return; \
445 } \
446 } else { \
447 if (unlikely(!ctx->altivec_enabled)) { \
448 gen_exception(ctx, POWERPC_EXCP_VPU); \
449 return; \
450 } \
451 } \
452 TCGv_i64 tmp = tcg_temp_new_i64(); \
453 tcg_gen_##tcgop1(tmp, source); \
454 tcg_gen_##tcgop2(target, tmp); \
455 tcg_temp_free_i64(tmp); \
456 }
457
458
459 MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
460 cpu_vsrh(xS(ctx->opcode)))
461 MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
462 cpu_gpr[rA(ctx->opcode)])
463 MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
464 cpu_gpr[rA(ctx->opcode)])
465
466 #if defined(TARGET_PPC64)
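/* Doubleword moves between a GPR and the high doubleword of a VSR */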
467 #define MV_VSRD(name, target, source) \
468 static void gen_##name(DisasContext *ctx) \
469 { \
470 if (xS(ctx->opcode) < 32) { \
471 if (unlikely(!ctx->fpu_enabled)) { \
472 gen_exception(ctx, POWERPC_EXCP_FPU); \
473 return; \
474 } \
475 } else { \
476 if (unlikely(!ctx->altivec_enabled)) { \
477 gen_exception(ctx, POWERPC_EXCP_VPU); \
478 return; \
479 } \
480 } \
481 tcg_gen_mov_i64(target, source); \
482 }
483
484 MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
485 MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])
486
487 static void gen_mfvsrld(DisasContext *ctx)
488 {
489 if (xS(ctx->opcode) < 32) {
490 if (unlikely(!ctx->vsx_enabled)) {
491 gen_exception(ctx, POWERPC_EXCP_VSXU);
492 return;
493 }
494 } else {
495 if (unlikely(!ctx->altivec_enabled)) {
496 gen_exception(ctx, POWERPC_EXCP_VPU);
497 return;
498 }
499 }
500
501 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
502 }
503
504 static void gen_mtvsrdd(DisasContext *ctx)
505 {
506 if (xT(ctx->opcode) < 32) {
507 if (unlikely(!ctx->vsx_enabled)) {
508 gen_exception(ctx, POWERPC_EXCP_VSXU);
509 return;
510 }
511 } else {
512 if (unlikely(!ctx->altivec_enabled)) {
513 gen_exception(ctx, POWERPC_EXCP_VPU);
514 return;
515 }
516 }
517
518 if (!rA(ctx->opcode)) {
519 tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
520 } else {
521 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
522 }
523
524 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
525 }
526
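/* mtvsrws: splat the low word of the GPR into all four words of VSR[XT] */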
527 static void gen_mtvsrws(DisasContext *ctx)
528 {
529 if (xT(ctx->opcode) < 32) {
530 if (unlikely(!ctx->vsx_enabled)) {
531 gen_exception(ctx, POWERPC_EXCP_VSXU);
532 return;
533 }
534 } else {
535 if (unlikely(!ctx->altivec_enabled)) {
536 gen_exception(ctx, POWERPC_EXCP_VPU);
537 return;
538 }
539 }
540
541 tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
542 cpu_gpr[rA(ctx->opcode)], 32, 32);
543 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
544 }
545
546 #endif
547
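/*
 * xxpermdi: pick one doubleword from VSR[XA] and one from VSR[XB] as
 * selected by DM.  Temporaries are only needed when XT aliases a source.
 */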
548 static void gen_xxpermdi(DisasContext *ctx)
549 {
550 if (unlikely(!ctx->vsx_enabled)) {
551 gen_exception(ctx, POWERPC_EXCP_VSXU);
552 return;
553 }
554
555 if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
556 (xT(ctx->opcode) == xB(ctx->opcode)))) {
557 TCGv_i64 xh, xl;
558
559 xh = tcg_temp_new_i64();
560 xl = tcg_temp_new_i64();
561
562 if ((DM(ctx->opcode) & 2) == 0) {
563 tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
564 } else {
565 tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
566 }
567 if ((DM(ctx->opcode) & 1) == 0) {
568 tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
569 } else {
570 tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
571 }
572
573 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
574 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);
575
576 tcg_temp_free_i64(xh);
577 tcg_temp_free_i64(xl);
578 } else {
579 if ((DM(ctx->opcode) & 2) == 0) {
580 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
581 } else {
582 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
583 }
584 if ((DM(ctx->opcode) & 1) == 0) {
585 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
586 } else {
587 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
588 }
589 }
590 }
591
592 #define OP_ABS 1
593 #define OP_NABS 2
594 #define OP_NEG 3
595 #define OP_CPSGN 4
596 #define SGN_MASK_DP 0x8000000000000000ull
597 #define SGN_MASK_SP 0x8000000080000000ull
598
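/* Scalar DP sign manipulation on the high doubleword: abs, nabs, neg, copysign */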
599 #define VSX_SCALAR_MOVE(name, op, sgn_mask) \
600 static void glue(gen_, name)(DisasContext * ctx) \
601 { \
602 TCGv_i64 xb, sgm; \
603 if (unlikely(!ctx->vsx_enabled)) { \
604 gen_exception(ctx, POWERPC_EXCP_VSXU); \
605 return; \
606 } \
607 xb = tcg_temp_new_i64(); \
608 sgm = tcg_temp_new_i64(); \
609 tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
610 tcg_gen_movi_i64(sgm, sgn_mask); \
611 switch (op) { \
612 case OP_ABS: { \
613 tcg_gen_andc_i64(xb, xb, sgm); \
614 break; \
615 } \
616 case OP_NABS: { \
617 tcg_gen_or_i64(xb, xb, sgm); \
618 break; \
619 } \
620 case OP_NEG: { \
621 tcg_gen_xor_i64(xb, xb, sgm); \
622 break; \
623 } \
624 case OP_CPSGN: { \
625 TCGv_i64 xa = tcg_temp_new_i64(); \
626 tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
627 tcg_gen_and_i64(xa, xa, sgm); \
628 tcg_gen_andc_i64(xb, xb, sgm); \
629 tcg_gen_or_i64(xb, xb, xa); \
630 tcg_temp_free_i64(xa); \
631 break; \
632 } \
633 } \
634 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
635 tcg_temp_free_i64(xb); \
636 tcg_temp_free_i64(sgm); \
637 }
638
639 VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
640 VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
641 VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
642 VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
643
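/* Quad-precision sign manipulation; operands live in the upper 32 VSRs */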
644 #define VSX_SCALAR_MOVE_QP(name, op, sgn_mask) \
645 static void glue(gen_, name)(DisasContext *ctx) \
646 { \
647 int xa; \
648 int xt = rD(ctx->opcode) + 32; \
649 int xb = rB(ctx->opcode) + 32; \
650 TCGv_i64 xah, xbh, xbl, sgm; \
651 \
652 if (unlikely(!ctx->vsx_enabled)) { \
653 gen_exception(ctx, POWERPC_EXCP_VSXU); \
654 return; \
655 } \
656 xbh = tcg_temp_new_i64(); \
657 xbl = tcg_temp_new_i64(); \
658 sgm = tcg_temp_new_i64(); \
659 tcg_gen_mov_i64(xbh, cpu_vsrh(xb)); \
660 tcg_gen_mov_i64(xbl, cpu_vsrl(xb)); \
661 tcg_gen_movi_i64(sgm, sgn_mask); \
662 switch (op) { \
663 case OP_ABS: \
664 tcg_gen_andc_i64(xbh, xbh, sgm); \
665 break; \
666 case OP_NABS: \
667 tcg_gen_or_i64(xbh, xbh, sgm); \
668 break; \
669 case OP_NEG: \
670 tcg_gen_xor_i64(xbh, xbh, sgm); \
671 break; \
672 case OP_CPSGN: \
673 xah = tcg_temp_new_i64(); \
674 xa = rA(ctx->opcode) + 32; \
675 tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm); \
676 tcg_gen_andc_i64(xbh, xbh, sgm); \
677 tcg_gen_or_i64(xbh, xbh, xah); \
678 tcg_temp_free_i64(xah); \
679 break; \
680 } \
681 tcg_gen_mov_i64(cpu_vsrh(xt), xbh); \
682 tcg_gen_mov_i64(cpu_vsrl(xt), xbl); \
683 tcg_temp_free_i64(xbl); \
684 tcg_temp_free_i64(xbh); \
685 tcg_temp_free_i64(sgm); \
686 }
687
688 VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
689 VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
690 VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
691 VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
692
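/* Vector sign manipulation across both doublewords (DP and SP forms) */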
693 #define VSX_VECTOR_MOVE(name, op, sgn_mask) \
694 static void glue(gen_, name)(DisasContext * ctx) \
695 { \
696 TCGv_i64 xbh, xbl, sgm; \
697 if (unlikely(!ctx->vsx_enabled)) { \
698 gen_exception(ctx, POWERPC_EXCP_VSXU); \
699 return; \
700 } \
701 xbh = tcg_temp_new_i64(); \
702 xbl = tcg_temp_new_i64(); \
703 sgm = tcg_temp_new_i64(); \
704 tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
705 tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
706 tcg_gen_movi_i64(sgm, sgn_mask); \
707 switch (op) { \
708 case OP_ABS: { \
709 tcg_gen_andc_i64(xbh, xbh, sgm); \
710 tcg_gen_andc_i64(xbl, xbl, sgm); \
711 break; \
712 } \
713 case OP_NABS: { \
714 tcg_gen_or_i64(xbh, xbh, sgm); \
715 tcg_gen_or_i64(xbl, xbl, sgm); \
716 break; \
717 } \
718 case OP_NEG: { \
719 tcg_gen_xor_i64(xbh, xbh, sgm); \
720 tcg_gen_xor_i64(xbl, xbl, sgm); \
721 break; \
722 } \
723 case OP_CPSGN: { \
724 TCGv_i64 xah = tcg_temp_new_i64(); \
725 TCGv_i64 xal = tcg_temp_new_i64(); \
726 tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
727 tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
728 tcg_gen_and_i64(xah, xah, sgm); \
729 tcg_gen_and_i64(xal, xal, sgm); \
730 tcg_gen_andc_i64(xbh, xbh, sgm); \
731 tcg_gen_andc_i64(xbl, xbl, sgm); \
732 tcg_gen_or_i64(xbh, xbh, xah); \
733 tcg_gen_or_i64(xbl, xbl, xal); \
734 tcg_temp_free_i64(xah); \
735 tcg_temp_free_i64(xal); \
736 break; \
737 } \
738 } \
739 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
740 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
741 tcg_temp_free_i64(xbh); \
742 tcg_temp_free_i64(xbl); \
743 tcg_temp_free_i64(sgm); \
744 }
745
746 VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
747 VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
748 VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
749 VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
750 VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
751 VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
752 VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
753 VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
754
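/*
 * Most VSX arithmetic is implemented out of line: these stubs only check
 * the facility bit and pass the raw opcode to the matching helper.
 */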
755 #define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
756 static void gen_##name(DisasContext * ctx) \
757 { \
758 TCGv_i32 opc; \
759 if (unlikely(!ctx->vsx_enabled)) { \
760 gen_exception(ctx, POWERPC_EXCP_VSXU); \
761 return; \
762 } \
763 opc = tcg_const_i32(ctx->opcode); \
764 gen_helper_##name(cpu_env, opc); \
765 tcg_temp_free_i32(opc); \
766 }
767
768 #define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
769 static void gen_##name(DisasContext * ctx) \
770 { \
771 if (unlikely(!ctx->vsx_enabled)) { \
772 gen_exception(ctx, POWERPC_EXCP_VSXU); \
773 return; \
774 } \
775 gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env, \
776 cpu_vsrh(xB(ctx->opcode))); \
777 }
778
779 GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
780 GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
781 GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
782 GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
783 GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
784 GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
785 GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
786 GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
787 GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
788 GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
789 GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
790 GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
791 GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
792 GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
793 GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
794 GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
795 GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
796 GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
797 GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
798 GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
799 GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
800 GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
801 GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
802 GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
803 GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
804 GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
805 GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
806 GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
807 GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
808 GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
809 GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
810 GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
811 GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
812 GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
813 GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
814 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
815 GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
816 GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
817 GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
818 GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
819 GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
820 GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
821 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
822 GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
823 GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
824 GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
825 GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
826 GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
827 GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
828 GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
829 GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
830 GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
831 GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
832 GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
833 GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
834 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
835
836 GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
837 GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
838 GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
839 GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
840 GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
841 GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
842 GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
843 GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
844 GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
845 GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
846 GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
847 GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
848 GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
849 GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
850 GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
851 GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
852 GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
853 GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
854 GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
855 GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)
856
857 GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
858 GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
859 GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
860 GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
861 GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
862 GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
863 GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
864 GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
865 GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
866 GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
867 GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
868 GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
869 GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
870 GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
871 GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
872 GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
873 GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
874 GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
875 GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
876 GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
877 GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
878 GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
879 GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
880 GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
881 GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
882 GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
883 GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
884 GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
885 GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
886 GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
887 GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
888 GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
889 GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
890 GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
891 GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
892 GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
893 GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
894
895 GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
896 GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
897 GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
898 GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
899 GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
900 GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
901 GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
902 GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
903 GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
904 GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
905 GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
906 GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
907 GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
908 GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
909 GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
910 GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
911 GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
912 GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
913 GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
914 GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
915 GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
916 GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
917 GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
918 GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
919 GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
920 GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
921 GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
922 GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
923 GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
924 GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
925 GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
926 GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
927 GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
928 GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
929 GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
930 GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
931 GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
932 GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
933 GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
934 GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
935 GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
936 GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
937 GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
938
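/* xxbrd/xxbrh/xxbrq/xxbrw: byte-reverse doubleword/halfword/quadword/word elements */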
939 static void gen_xxbrd(DisasContext *ctx)
940 {
941 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
942 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
943 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
944 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
945
946 if (unlikely(!ctx->vsx_enabled)) {
947 gen_exception(ctx, POWERPC_EXCP_VSXU);
948 return;
949 }
950 tcg_gen_bswap64_i64(xth, xbh);
951 tcg_gen_bswap64_i64(xtl, xbl);
952 }
953
954 static void gen_xxbrh(DisasContext *ctx)
955 {
956 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
957 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
958 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
959 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
960
961 if (unlikely(!ctx->vsx_enabled)) {
962 gen_exception(ctx, POWERPC_EXCP_VSXU);
963 return;
964 }
965 gen_bswap16x8(xth, xtl, xbh, xbl);
966 }
967
968 static void gen_xxbrq(DisasContext *ctx)
969 {
970 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
971 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
972 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
973 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
974 TCGv_i64 t0 = tcg_temp_new_i64();
975
976 if (unlikely(!ctx->vsx_enabled)) {
977 gen_exception(ctx, POWERPC_EXCP_VSXU);
978 return;
979 }
980 tcg_gen_bswap64_i64(t0, xbl);
981 tcg_gen_bswap64_i64(xtl, xbh);
982 tcg_gen_mov_i64(xth, t0);
983 tcg_temp_free_i64(t0);
984 }
985
986 static void gen_xxbrw(DisasContext *ctx)
987 {
988 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
989 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
990 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
991 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
992
993 if (unlikely(!ctx->vsx_enabled)) {
994 gen_exception(ctx, POWERPC_EXCP_VSXU);
995 return;
996 }
997 gen_bswap32x4(xth, xtl, xbh, xbl);
998 }
999
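/* 128-bit logical operations, implemented as two 64-bit TCG ops */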
1000 #define VSX_LOGICAL(name, tcg_op) \
1001 static void glue(gen_, name)(DisasContext * ctx) \
1002 { \
1003 if (unlikely(!ctx->vsx_enabled)) { \
1004 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1005 return; \
1006 } \
1007 tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
1008 cpu_vsrh(xB(ctx->opcode))); \
1009 tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
1010 cpu_vsrl(xB(ctx->opcode))); \
1011 }
1012
1013 VSX_LOGICAL(xxland, tcg_gen_and_i64)
1014 VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
1015 VSX_LOGICAL(xxlor, tcg_gen_or_i64)
1016 VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
1017 VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
1018 VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
1019 VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
1020 VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)
1021
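/* xxmrghw/xxmrglw: interleave the high (or low) words of VSR[XA] and VSR[XB] */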
1022 #define VSX_XXMRG(name, high) \
1023 static void glue(gen_, name)(DisasContext * ctx) \
1024 { \
1025 TCGv_i64 a0, a1, b0, b1; \
1026 if (unlikely(!ctx->vsx_enabled)) { \
1027 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1028 return; \
1029 } \
1030 a0 = tcg_temp_new_i64(); \
1031 a1 = tcg_temp_new_i64(); \
1032 b0 = tcg_temp_new_i64(); \
1033 b1 = tcg_temp_new_i64(); \
1034 if (high) { \
1035 tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
1036 tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
1037 tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
1038 tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
1039 } else { \
1040 tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
1041 tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
1042 tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
1043 tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
1044 } \
1045 tcg_gen_shri_i64(a0, a0, 32); \
1046 tcg_gen_shri_i64(b0, b0, 32); \
1047 tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
1048 b0, a0, 32, 32); \
1049 tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
1050 b1, a1, 32, 32); \
1051 tcg_temp_free_i64(a0); \
1052 tcg_temp_free_i64(a1); \
1053 tcg_temp_free_i64(b0); \
1054 tcg_temp_free_i64(b1); \
1055 }
1056
1057 VSX_XXMRG(xxmrghw, 1)
1058 VSX_XXMRG(xxmrglw, 0)
1059
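/* xxsel: bitwise select, XT = (XA & ~XC) | (XB & XC) */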
1060 static void gen_xxsel(DisasContext * ctx)
1061 {
1062 TCGv_i64 a, b, c;
1063 if (unlikely(!ctx->vsx_enabled)) {
1064 gen_exception(ctx, POWERPC_EXCP_VSXU);
1065 return;
1066 }
1067 a = tcg_temp_new_i64();
1068 b = tcg_temp_new_i64();
1069 c = tcg_temp_new_i64();
1070
1071 tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
1072 tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
1073 tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));
1074
1075 tcg_gen_and_i64(b, b, c);
1076 tcg_gen_andc_i64(a, a, c);
1077 tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);
1078
1079 tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
1080 tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
1081 tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));
1082
1083 tcg_gen_and_i64(b, b, c);
1084 tcg_gen_andc_i64(a, a, c);
1085 tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);
1086
1087 tcg_temp_free_i64(a);
1088 tcg_temp_free_i64(b);
1089 tcg_temp_free_i64(c);
1090 }
1091
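/* xxspltw: splat the word selected by UIM into all four words of VSR[XT] */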
1092 static void gen_xxspltw(DisasContext *ctx)
1093 {
1094 TCGv_i64 b, b2;
1095 TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
1096 cpu_vsrl(xB(ctx->opcode)) :
1097 cpu_vsrh(xB(ctx->opcode));
1098
1099 if (unlikely(!ctx->vsx_enabled)) {
1100 gen_exception(ctx, POWERPC_EXCP_VSXU);
1101 return;
1102 }
1103
1104 b = tcg_temp_new_i64();
1105 b2 = tcg_temp_new_i64();
1106
1107 if (UIM(ctx->opcode) & 1) {
1108 tcg_gen_ext32u_i64(b, vsr);
1109 } else {
1110 tcg_gen_shri_i64(b, vsr, 32);
1111 }
1112
1113 tcg_gen_shli_i64(b2, b, 32);
1114 tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
1115 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
1116
1117 tcg_temp_free_i64(b);
1118 tcg_temp_free_i64(b2);
1119 }
1120
1121 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
1122
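/* xxspltib: splat an 8-bit immediate into all sixteen bytes of VSR[XT] */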
1123 static void gen_xxspltib(DisasContext *ctx)
1124 {
1125 unsigned char uim8 = IMM8(ctx->opcode);
1126 if (xS(ctx->opcode) < 32) {
1127 if (unlikely(!ctx->altivec_enabled)) {
1128 gen_exception(ctx, POWERPC_EXCP_VPU);
1129 return;
1130 }
1131 } else {
1132 if (unlikely(!ctx->vsx_enabled)) {
1133 gen_exception(ctx, POWERPC_EXCP_VSXU);
1134 return;
1135 }
1136 }
1137 tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
1138 tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
1139 }
1140
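/* xxsldwi: shift the 256-bit value {XA:XB} left by SHW words, keep the top 128 bits */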
1141 static void gen_xxsldwi(DisasContext *ctx)
1142 {
1143 TCGv_i64 xth, xtl;
1144 if (unlikely(!ctx->vsx_enabled)) {
1145 gen_exception(ctx, POWERPC_EXCP_VSXU);
1146 return;
1147 }
1148 xth = tcg_temp_new_i64();
1149 xtl = tcg_temp_new_i64();
1150
1151 switch (SHW(ctx->opcode)) {
1152 case 0: {
1153 tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
1154 tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
1155 break;
1156 }
1157 case 1: {
1158 TCGv_i64 t0 = tcg_temp_new_i64();
1159 tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
1160 tcg_gen_shli_i64(xth, xth, 32);
1161 tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
1162 tcg_gen_shri_i64(t0, t0, 32);
1163 tcg_gen_or_i64(xth, xth, t0);
1164 tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
1165 tcg_gen_shli_i64(xtl, xtl, 32);
1166 tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
1167 tcg_gen_shri_i64(t0, t0, 32);
1168 tcg_gen_or_i64(xtl, xtl, t0);
1169 tcg_temp_free_i64(t0);
1170 break;
1171 }
1172 case 2: {
1173 tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
1174 tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
1175 break;
1176 }
1177 case 3: {
1178 TCGv_i64 t0 = tcg_temp_new_i64();
1179 tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
1180 tcg_gen_shli_i64(xth, xth, 32);
1181 tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
1182 tcg_gen_shri_i64(t0, t0, 32);
1183 tcg_gen_or_i64(xth, xth, t0);
1184 tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
1185 tcg_gen_shli_i64(xtl, xtl, 32);
1186 tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
1187 tcg_gen_shri_i64(t0, t0, 32);
1188 tcg_gen_or_i64(xtl, xtl, t0);
1189 tcg_temp_free_i64(t0);
1190 break;
1191 }
1192 }
1193
1194 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
1195 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);
1196
1197 tcg_temp_free_i64(xth);
1198 tcg_temp_free_i64(xtl);
1199 }
1200
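/* xxextractuw/xxinsertw: word extract/insert at a byte offset, done in helpers */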
1201 #define VSX_EXTRACT_INSERT(name) \
1202 static void gen_##name(DisasContext *ctx) \
1203 { \
1204 TCGv xt, xb; \
1205 TCGv_i32 t0 = tcg_temp_new_i32(); \
1206 uint8_t uimm = UIMM4(ctx->opcode); \
1207 \
1208 if (unlikely(!ctx->vsx_enabled)) { \
1209 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1210 return; \
1211 } \
1212 xt = tcg_const_tl(xT(ctx->opcode)); \
1213 xb = tcg_const_tl(xB(ctx->opcode)); \
1214 /* uimm > 15 is out of bounds; for \
1215 * uimm > 12 the helper handles it as the hardware does \
1216 */ \
1217 if (uimm > 15) { \
1218 tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0); \
1219 tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), 0); \
1220 return; \
1221 } \
1222 tcg_gen_movi_i32(t0, uimm); \
1223 gen_helper_##name(cpu_env, xt, xb, t0); \
1224 tcg_temp_free(xb); \
1225 tcg_temp_free(xt); \
1226 tcg_temp_free_i32(t0); \
1227 }
1228
1229 VSX_EXTRACT_INSERT(xxextractuw)
1230 VSX_EXTRACT_INSERT(xxinsertw)
1231
1232 #ifdef TARGET_PPC64
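/* Scalar exponent/significand extract and insert (xsxexp*, xsiexp*, xsxsig*) */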
1233 static void gen_xsxexpdp(DisasContext *ctx)
1234 {
1235 TCGv rt = cpu_gpr[rD(ctx->opcode)];
1236 if (unlikely(!ctx->vsx_enabled)) {
1237 gen_exception(ctx, POWERPC_EXCP_VSXU);
1238 return;
1239 }
1240 tcg_gen_shri_i64(rt, cpu_vsrh(xB(ctx->opcode)), 52);
1241 tcg_gen_andi_i64(rt, rt, 0x7FF);
1242 }
1243
1244 static void gen_xsxexpqp(DisasContext *ctx)
1245 {
1246 TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
1247 TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
1248 TCGv_i64 xbh = cpu_vsrh(rB(ctx->opcode) + 32);
1249
1250 if (unlikely(!ctx->vsx_enabled)) {
1251 gen_exception(ctx, POWERPC_EXCP_VSXU);
1252 return;
1253 }
1254 tcg_gen_shri_i64(xth, xbh, 48);
1255 tcg_gen_andi_i64(xth, xth, 0x7FFF);
1256 tcg_gen_movi_i64(xtl, 0);
1257 }
1258
1259 static void gen_xsiexpdp(DisasContext *ctx)
1260 {
1261 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
1262 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1263 TCGv rb = cpu_gpr[rB(ctx->opcode)];
1264 TCGv_i64 t0;
1265
1266 if (unlikely(!ctx->vsx_enabled)) {
1267 gen_exception(ctx, POWERPC_EXCP_VSXU);
1268 return;
1269 }
1270 t0 = tcg_temp_new_i64();
1271 tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1272 tcg_gen_andi_i64(t0, rb, 0x7FF);
1273 tcg_gen_shli_i64(t0, t0, 52);
1274 tcg_gen_or_i64(xth, xth, t0);
1275 /* dword[1] is undefined */
1276 tcg_temp_free_i64(t0);
1277 }
1278
1279 static void gen_xsiexpqp(DisasContext *ctx)
1280 {
1281 TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
1282 TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
1283 TCGv_i64 xah = cpu_vsrh(rA(ctx->opcode) + 32);
1284 TCGv_i64 xal = cpu_vsrl(rA(ctx->opcode) + 32);
1285 TCGv_i64 xbh = cpu_vsrh(rB(ctx->opcode) + 32);
1286 TCGv_i64 t0;
1287
1288 if (unlikely(!ctx->vsx_enabled)) {
1289 gen_exception(ctx, POWERPC_EXCP_VSXU);
1290 return;
1291 }
1292 t0 = tcg_temp_new_i64();
1293 tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1294 tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1295 tcg_gen_shli_i64(t0, t0, 48);
1296 tcg_gen_or_i64(xth, xth, t0);
1297 tcg_gen_mov_i64(xtl, xal);
1298 tcg_temp_free_i64(t0);
1299 }
1300
1301 static void gen_xsxsigdp(DisasContext *ctx)
1302 {
1303 TCGv rt = cpu_gpr[rD(ctx->opcode)];
1304 TCGv_i64 t0, zr, nan, exp;
1305
1306 if (unlikely(!ctx->vsx_enabled)) {
1307 gen_exception(ctx, POWERPC_EXCP_VSXU);
1308 return;
1309 }
1310 exp = tcg_temp_new_i64();
1311 t0 = tcg_temp_new_i64();
1312 zr = tcg_const_i64(0);
1313 nan = tcg_const_i64(2047);
1314
1315 tcg_gen_shri_i64(exp, cpu_vsrh(xB(ctx->opcode)), 52);
1316 tcg_gen_andi_i64(exp, exp, 0x7FF);
1317 tcg_gen_movi_i64(t0, 0x0010000000000000);
1318 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1319 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1320 tcg_gen_andi_i64(rt, cpu_vsrh(xB(ctx->opcode)), 0x000FFFFFFFFFFFFF);
1321 tcg_gen_or_i64(rt, rt, t0);
1322
1323 tcg_temp_free_i64(t0);
1324 tcg_temp_free_i64(exp);
1325 tcg_temp_free_i64(zr);
1326 tcg_temp_free_i64(nan);
1327 }
1328
1329 static void gen_xsxsigqp(DisasContext *ctx)
1330 {
1331 TCGv_i64 t0, zr, nan, exp;
1332 TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
1333 TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
1334
1335 if (unlikely(!ctx->vsx_enabled)) {
1336 gen_exception(ctx, POWERPC_EXCP_VSXU);
1337 return;
1338 }
1339 exp = tcg_temp_new_i64();
1340 t0 = tcg_temp_new_i64();
1341 zr = tcg_const_i64(0);
1342 nan = tcg_const_i64(32767);
1343
1344 tcg_gen_shri_i64(exp, cpu_vsrh(rB(ctx->opcode) + 32), 48);
1345 tcg_gen_andi_i64(exp, exp, 0x7FFF);
1346 tcg_gen_movi_i64(t0, 0x0001000000000000);
1347 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1348 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1349 tcg_gen_andi_i64(xth, cpu_vsrh(rB(ctx->opcode) + 32), 0x0000FFFFFFFFFFFF);
1350 tcg_gen_or_i64(xth, xth, t0);
1351 tcg_gen_mov_i64(xtl, cpu_vsrl(rB(ctx->opcode) + 32));
1352
1353 tcg_temp_free_i64(t0);
1354 tcg_temp_free_i64(exp);
1355 tcg_temp_free_i64(zr);
1356 tcg_temp_free_i64(nan);
1357 }
1358 #endif
1359
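/* Vector exponent/significand insert and extract (xviexp*, xvxexp*, xvxsig*) */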
1360 static void gen_xviexpsp(DisasContext *ctx)
1361 {
1362 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
1363 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
1364 TCGv_i64 xah = cpu_vsrh(xA(ctx->opcode));
1365 TCGv_i64 xal = cpu_vsrl(xA(ctx->opcode));
1366 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
1367 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
1368 TCGv_i64 t0;
1369
1370 if (unlikely(!ctx->vsx_enabled)) {
1371 gen_exception(ctx, POWERPC_EXCP_VSXU);
1372 return;
1373 }
1374 t0 = tcg_temp_new_i64();
1375 tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
1376 tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
1377 tcg_gen_shli_i64(t0, t0, 23);
1378 tcg_gen_or_i64(xth, xth, t0);
1379 tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
1380 tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
1381 tcg_gen_shli_i64(t0, t0, 23);
1382 tcg_gen_or_i64(xtl, xtl, t0);
1383 tcg_temp_free_i64(t0);
1384 }
1385
1386 static void gen_xviexpdp(DisasContext *ctx)
1387 {
1388 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
1389 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
1390 TCGv_i64 xah = cpu_vsrh(xA(ctx->opcode));
1391 TCGv_i64 xal = cpu_vsrl(xA(ctx->opcode));
1392 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
1393 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
1394 TCGv_i64 t0;
1395
1396 if (unlikely(!ctx->vsx_enabled)) {
1397 gen_exception(ctx, POWERPC_EXCP_VSXU);
1398 return;
1399 }
1400 t0 = tcg_temp_new_i64();
1401 tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
1402 tcg_gen_andi_i64(t0, xbh, 0x7FF);
1403 tcg_gen_shli_i64(t0, t0, 52);
1404 tcg_gen_or_i64(xth, xth, t0);
1405 tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
1406 tcg_gen_andi_i64(t0, xbl, 0x7FF);
1407 tcg_gen_shli_i64(t0, t0, 52);
1408 tcg_gen_or_i64(xtl, xtl, t0);
1409 tcg_temp_free_i64(t0);
1410 }
1411
1412 static void gen_xvxexpsp(DisasContext *ctx)
1413 {
1414 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
1415 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
1416 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
1417 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
1418
1419 if (unlikely(!ctx->vsx_enabled)) {
1420 gen_exception(ctx, POWERPC_EXCP_VSXU);
1421 return;
1422 }
1423 tcg_gen_shri_i64(xth, xbh, 23);
1424 tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
1425 tcg_gen_shri_i64(xtl, xbl, 23);
1426 tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
1427 }
1428
1429 static void gen_xvxexpdp(DisasContext *ctx)
1430 {
1431 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
1432 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
1433 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
1434 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
1435
1436 if (unlikely(!ctx->vsx_enabled)) {
1437 gen_exception(ctx, POWERPC_EXCP_VSXU);
1438 return;
1439 }
1440 tcg_gen_shri_i64(xth, xbh, 52);
1441 tcg_gen_andi_i64(xth, xth, 0x7FF);
1442 tcg_gen_shri_i64(xtl, xbl, 52);
1443 tcg_gen_andi_i64(xtl, xtl, 0x7FF);
1444 }
1445
1446 GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
1447
1448 static void gen_xvxsigdp(DisasContext *ctx)
1449 {
1450 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
1451 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
1452 TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
1453 TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
1454
1455 TCGv_i64 t0, zr, nan, exp;
1456
1457 if (unlikely(!ctx->vsx_enabled)) {
1458 gen_exception(ctx, POWERPC_EXCP_VSXU);
1459 return;
1460 }
1461 exp = tcg_temp_new_i64();
1462 t0 = tcg_temp_new_i64();
1463 zr = tcg_const_i64(0);
1464 nan = tcg_const_i64(2047);
1465
1466 tcg_gen_shri_i64(exp, xbh, 52);
1467 tcg_gen_andi_i64(exp, exp, 0x7FF);
1468 tcg_gen_movi_i64(t0, 0x0010000000000000);
1469 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1470 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1471 tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
1472 tcg_gen_or_i64(xth, xth, t0);
1473
1474 tcg_gen_shri_i64(exp, xbl, 52);
1475 tcg_gen_andi_i64(exp, exp, 0x7FF);
1476 tcg_gen_movi_i64(t0, 0x0010000000000000);
1477 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1478 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1479 tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
1480 tcg_gen_or_i64(xtl, xtl, t0);
1481
1482 tcg_temp_free_i64(t0);
1483 tcg_temp_free_i64(exp);
1484 tcg_temp_free_i64(zr);
1485 tcg_temp_free_i64(nan);
1486 }
1487
1488 #undef GEN_XX2FORM
1489 #undef GEN_XX3FORM
1490 #undef GEN_XX2IFORM
1491 #undef GEN_XX3_RC_FORM
1492 #undef GEN_XX3FORM_DM
1493 #undef VSX_LOGICAL