git.proxmox.com Git - mirror_qemu.git - blob: target/ppc/translate/vsx-impl.inc.c
Commit: target/ppc: move FP and VMX registers into aligned vsr register array
1 /*** VSX extension ***/
2
3 static inline void get_vsr(TCGv_i64 dst, int n)
4 {
5 tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
6 }
7
8 static inline void set_vsr(int n, TCGv_i64 src)
9 {
10 tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
11 }
12
13 static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
14 {
15 if (n < 32) {
16 get_fpr(dst, n);
17 } else {
18 get_avr64(dst, n - 32, true);
19 }
20 }
21
22 static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
23 {
24 if (n < 32) {
25 get_vsr(dst, n);
26 } else {
27 get_avr64(dst, n - 32, false);
28 }
29 }
30
31 static inline void set_cpu_vsrh(int n, TCGv_i64 src)
32 {
33 if (n < 32) {
34 set_fpr(n, src);
35 } else {
36 set_avr64(n - 32, src, true);
37 }
38 }
39
40 static inline void set_cpu_vsrl(int n, TCGv_i64 src)
41 {
42 if (n < 32) {
43 set_vsr(n, src);
44 } else {
45 set_avr64(n - 32, src, false);
46 }
47 }
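/*
 * With the FP and VMX state folded into a single aligned vsr[64] array in
 * CPUPPCState, VSR0-31 overlay the FPRs in doubleword 0, with the low
 * doubleword at vsr[n].u64[1] (what get_vsr()/set_vsr() touch), while
 * VSR32-63 are the AVRs.  A sketch of the backing storage, assuming the
 * usual ppc_vsr_t union from cpu.h:
 *
 *     typedef union ppc_vsr_t {
 *         uint8_t  u8[16];
 *         uint64_t u64[2];   // u64[0]: high dword (FPR alias), u64[1]: low
 *     } ppc_vsr_t;
 *
 * Hence get_cpu_vsrh()/set_cpu_vsrh() route through the FPR accessors for
 * n < 32 and through get_avr64()/set_avr64() for n >= 32.
 */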
48
49 #define VSX_LOAD_SCALAR(name, operation) \
50 static void gen_##name(DisasContext *ctx) \
51 { \
52 TCGv EA; \
53 TCGv_i64 t0; \
54 if (unlikely(!ctx->vsx_enabled)) { \
55 gen_exception(ctx, POWERPC_EXCP_VSXU); \
56 return; \
57 } \
58 t0 = tcg_temp_new_i64(); \
59 gen_set_access_type(ctx, ACCESS_INT); \
60 EA = tcg_temp_new(); \
61 gen_addr_reg_index(ctx, EA); \
62 gen_qemu_##operation(ctx, t0, EA); \
63 set_cpu_vsrh(xT(ctx->opcode), t0); \
64 /* NOTE: cpu_vsrl is undefined */ \
65 tcg_temp_free(EA); \
66 tcg_temp_free_i64(t0); \
67 }
68
69 VSX_LOAD_SCALAR(lxsdx, ld64_i64)
70 VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
71 VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
72 VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
73 VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
74 VSX_LOAD_SCALAR(lxsspx, ld32fs)
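/*
 * For reference, a hand expansion of VSX_LOAD_SCALAR(lxsdx, ld64_i64) --
 * a sketch, differing from the real preprocessor output only in layout:
 *
 *     static void gen_lxsdx(DisasContext *ctx)
 *     {
 *         TCGv EA;
 *         TCGv_i64 t0;
 *         if (unlikely(!ctx->vsx_enabled)) {
 *             gen_exception(ctx, POWERPC_EXCP_VSXU);
 *             return;
 *         }
 *         t0 = tcg_temp_new_i64();
 *         gen_set_access_type(ctx, ACCESS_INT);
 *         EA = tcg_temp_new();
 *         gen_addr_reg_index(ctx, EA);        // EA = (rA|0) + rB
 *         gen_qemu_ld64_i64(ctx, t0, EA);     // one scalar element from memory
 *         set_cpu_vsrh(xT(ctx->opcode), t0);  // into dword 0 of VSR[XT]
 *         tcg_temp_free(EA);
 *         tcg_temp_free_i64(t0);
 *     }
 */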
75
76 static void gen_lxvd2x(DisasContext *ctx)
77 {
78 TCGv EA;
79 TCGv_i64 t0;
80 if (unlikely(!ctx->vsx_enabled)) {
81 gen_exception(ctx, POWERPC_EXCP_VSXU);
82 return;
83 }
84 t0 = tcg_temp_new_i64();
85 gen_set_access_type(ctx, ACCESS_INT);
86 EA = tcg_temp_new();
87 gen_addr_reg_index(ctx, EA);
88 gen_qemu_ld64_i64(ctx, t0, EA);
89 set_cpu_vsrh(xT(ctx->opcode), t0);
90 tcg_gen_addi_tl(EA, EA, 8);
91 gen_qemu_ld64_i64(ctx, t0, EA);
92 set_cpu_vsrl(xT(ctx->opcode), t0);
93 tcg_temp_free(EA);
94 tcg_temp_free_i64(t0);
95 }
96
97 static void gen_lxvdsx(DisasContext *ctx)
98 {
99 TCGv EA;
100 TCGv_i64 t0;
101 TCGv_i64 t1;
102 if (unlikely(!ctx->vsx_enabled)) {
103 gen_exception(ctx, POWERPC_EXCP_VSXU);
104 return;
105 }
106 t0 = tcg_temp_new_i64();
107 t1 = tcg_temp_new_i64();
108 gen_set_access_type(ctx, ACCESS_INT);
109 EA = tcg_temp_new();
110 gen_addr_reg_index(ctx, EA);
111 gen_qemu_ld64_i64(ctx, t0, EA);
112 set_cpu_vsrh(xT(ctx->opcode), t0);
113 tcg_gen_mov_i64(t1, t0);
114 set_cpu_vsrl(xT(ctx->opcode), t1);
115 tcg_temp_free(EA);
116 tcg_temp_free_i64(t0);
117 tcg_temp_free_i64(t1);
118 }
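/*
 * lxvdsx is "load dword and splat": the same 8-byte value lands in both
 * halves of the target; the t1 copy exists only so the two set_cpu_vsr*
 * calls operate on distinct temporaries.
 */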
119
120 static void gen_lxvw4x(DisasContext *ctx)
121 {
122 TCGv EA;
123 TCGv_i64 xth;
124 TCGv_i64 xtl;
125 if (unlikely(!ctx->vsx_enabled)) {
126 gen_exception(ctx, POWERPC_EXCP_VSXU);
127 return;
128 }
129 xth = tcg_temp_new_i64();
130 xtl = tcg_temp_new_i64();
131 get_cpu_vsrh(xth, xT(ctx->opcode));
132 get_cpu_vsrl(xtl, xT(ctx->opcode));
133 gen_set_access_type(ctx, ACCESS_INT);
134 EA = tcg_temp_new();
135
136 gen_addr_reg_index(ctx, EA);
137 if (ctx->le_mode) {
138 TCGv_i64 t0 = tcg_temp_new_i64();
139 TCGv_i64 t1 = tcg_temp_new_i64();
140
141 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
142 tcg_gen_shri_i64(t1, t0, 32);
143 tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
144 tcg_gen_addi_tl(EA, EA, 8);
145 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
146 tcg_gen_shri_i64(t1, t0, 32);
147 tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
148 tcg_temp_free_i64(t0);
149 tcg_temp_free_i64(t1);
150 } else {
151 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
152 tcg_gen_addi_tl(EA, EA, 8);
153 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
154 }
155 tcg_temp_free(EA);
156 tcg_temp_free_i64(xth);
157 tcg_temp_free_i64(xtl);
158 }
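/*
 * lxvw4x fills word element i from the word at EA + 4*i in the current
 * byte order.  In LE mode an 8-byte MO_LEQ load yields the right bytes
 * within each word but with the two words swapped inside the dword, so
 * the shri/deposit pair swaps them back.  Writing a dword as [hi:lo]:
 *
 *     t0  = [ word(EA+4) : word(EA)   ]   after the MO_LEQ load
 *     t1  = [ 0          : word(EA+4) ]   t0 >> 32
 *     xth = [ word(EA)   : word(EA+4) ]   deposit(t1, t0, 32, 32)
 *
 * The stxvw4x store path below mirrors this with the same trick.
 */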
159
160 static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
161 TCGv_i64 inh, TCGv_i64 inl)
162 {
163 TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
164 TCGv_i64 t0 = tcg_temp_new_i64();
165 TCGv_i64 t1 = tcg_temp_new_i64();
166
167 /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
168 tcg_gen_and_i64(t0, inh, mask);
169 tcg_gen_shli_i64(t0, t0, 8);
170 tcg_gen_shri_i64(t1, inh, 8);
171 tcg_gen_and_i64(t1, t1, mask);
172 tcg_gen_or_i64(outh, t0, t1);
173
174 /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
175 tcg_gen_and_i64(t0, inl, mask);
176 tcg_gen_shli_i64(t0, t0, 8);
177 tcg_gen_shri_i64(t1, inl, 8);
178 tcg_gen_and_i64(t1, t1, mask);
179 tcg_gen_or_i64(outl, t0, t1);
180
181 tcg_temp_free_i64(t0);
182 tcg_temp_free_i64(t1);
183 tcg_temp_free_i64(mask);
184 }
185
186 static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
187 TCGv_i64 inh, TCGv_i64 inl)
188 {
189 TCGv_i64 hi = tcg_temp_new_i64();
190 TCGv_i64 lo = tcg_temp_new_i64();
191
192 tcg_gen_bswap64_i64(hi, inh);
193 tcg_gen_bswap64_i64(lo, inl);
194 tcg_gen_shri_i64(outh, hi, 32);
195 tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
196 tcg_gen_shri_i64(outl, lo, 32);
197 tcg_gen_deposit_i64(outl, outl, lo, 32, 32);
198
199 tcg_temp_free_i64(hi);
200 tcg_temp_free_i64(lo);
201 }
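/*
 * gen_bswap16x8() byte-swaps each of the eight 16-bit lanes with the
 * classic mask trick: given mask = 0x00FF00FF00FF00FF,
 *
 *     out = ((in & mask) << 8) | ((in >> 8) & mask)
 *
 * lifts every even byte and drops every odd one, e.g.
 * 0x1122334455667788 -> 0x2211443366558877.
 *
 * gen_bswap32x4() instead reuses bswap64 (which reverses all eight bytes,
 * i.e. also swaps the two words) and then swaps the words back with
 * shri/deposit, leaving each 32-bit lane individually byte-reversed.
 */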
202 static void gen_lxvh8x(DisasContext *ctx)
203 {
204 TCGv EA;
205 TCGv_i64 xth;
206 TCGv_i64 xtl;
207
208 if (unlikely(!ctx->vsx_enabled)) {
209 gen_exception(ctx, POWERPC_EXCP_VSXU);
210 return;
211 }
212 xth = tcg_temp_new_i64();
213 xtl = tcg_temp_new_i64();
214 get_cpu_vsrh(xth, xT(ctx->opcode));
215 get_cpu_vsrl(xtl, xT(ctx->opcode));
216 gen_set_access_type(ctx, ACCESS_INT);
217
218 EA = tcg_temp_new();
219 gen_addr_reg_index(ctx, EA);
220 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
221 tcg_gen_addi_tl(EA, EA, 8);
222 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
223 if (ctx->le_mode) {
224 gen_bswap16x8(xth, xtl, xth, xtl);
225 }
226 tcg_temp_free(EA);
227 tcg_temp_free_i64(xth);
228 tcg_temp_free_i64(xtl);
229 }
230
231 static void gen_lxvb16x(DisasContext *ctx)
232 {
233 TCGv EA;
234 TCGv_i64 xth;
235 TCGv_i64 xtl;
236
237 if (unlikely(!ctx->vsx_enabled)) {
238 gen_exception(ctx, POWERPC_EXCP_VSXU);
239 return;
240 }
241 xth = tcg_temp_new_i64();
242 xtl = tcg_temp_new_i64();
243 get_cpu_vsrh(xth, xT(ctx->opcode));
244 get_cpu_vsrl(xtl, xT(ctx->opcode));
245 gen_set_access_type(ctx, ACCESS_INT);
246 EA = tcg_temp_new();
247 gen_addr_reg_index(ctx, EA);
248 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
249 tcg_gen_addi_tl(EA, EA, 8);
250 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
251 tcg_temp_free(EA);
252 tcg_temp_free_i64(xth);
253 tcg_temp_free_i64(xtl);
254 }
255
256 #define VSX_VECTOR_LOAD_STORE(name, op, indexed) \
257 static void gen_##name(DisasContext *ctx) \
258 { \
259 int xt; \
260 TCGv EA; \
261 TCGv_i64 xth; \
262 TCGv_i64 xtl; \
263 \
264 if (indexed) { \
265 xt = xT(ctx->opcode); \
266 } else { \
267 xt = DQxT(ctx->opcode); \
268 } \
269 \
270 if (xt < 32) { \
271 if (unlikely(!ctx->vsx_enabled)) { \
272 gen_exception(ctx, POWERPC_EXCP_VSXU); \
273 return; \
274 } \
275 } else { \
276 if (unlikely(!ctx->altivec_enabled)) { \
277 gen_exception(ctx, POWERPC_EXCP_VPU); \
278 return; \
279 } \
280 } \
281 xth = tcg_temp_new_i64(); \
282 xtl = tcg_temp_new_i64(); \
283 get_cpu_vsrh(xth, xt); \
284 get_cpu_vsrl(xtl, xt); \
285 gen_set_access_type(ctx, ACCESS_INT); \
286 EA = tcg_temp_new(); \
287 if (indexed) { \
288 gen_addr_reg_index(ctx, EA); \
289 } else { \
290 gen_addr_imm_index(ctx, EA, 0x0F); \
291 } \
292 if (ctx->le_mode) { \
293 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
294 set_cpu_vsrl(xt, xtl); \
295 tcg_gen_addi_tl(EA, EA, 8); \
296 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
297 set_cpu_vsrh(xt, xth); \
298 } else { \
299 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
300 set_cpu_vsrh(xt, xth); \
301 tcg_gen_addi_tl(EA, EA, 8); \
302 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
303 set_cpu_vsrl(xt, xtl); \
304 } \
305 tcg_temp_free(EA); \
306 tcg_temp_free_i64(xth); \
307 tcg_temp_free_i64(xtl); \
308 }
309
310 VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
311 VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
312 VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
313 VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)
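/*
 * One macro covers four ISA 3.0 instructions: lxv/stxv take a DQ-form
 * immediate displacement (hence gen_addr_imm_index() with a 0x0F mask and
 * the DQxT target field), while lxvx/stxvx are indexed.  Targets 0-31 sit
 * in the VSX half of the register file and need ctx->vsx_enabled; targets
 * 32-63 are the VMX registers and need ctx->altivec_enabled, which is why
 * the facility check is split.  In LE mode the low dword lives at the
 * lower address, so the two 8-byte accesses are issued in reverse order.
 */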
314
315 #ifdef TARGET_PPC64
316 #define VSX_VECTOR_LOAD_STORE_LENGTH(name) \
317 static void gen_##name(DisasContext *ctx) \
318 { \
319 TCGv EA, xt; \
320 \
321 if (xT(ctx->opcode) < 32) { \
322 if (unlikely(!ctx->vsx_enabled)) { \
323 gen_exception(ctx, POWERPC_EXCP_VSXU); \
324 return; \
325 } \
326 } else { \
327 if (unlikely(!ctx->altivec_enabled)) { \
328 gen_exception(ctx, POWERPC_EXCP_VPU); \
329 return; \
330 } \
331 } \
332 EA = tcg_temp_new(); \
333 xt = tcg_const_tl(xT(ctx->opcode)); \
334 gen_set_access_type(ctx, ACCESS_INT); \
335 gen_addr_register(ctx, EA); \
336 gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
337 tcg_temp_free(EA); \
338 tcg_temp_free(xt); \
339 }
340
341 VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
342 VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
343 VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
344 VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
345 #endif
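/*
 * The load/store-with-length forms stay out of line: the byte count is
 * run-time data in rB (per ISA 3.0, taken from its most-significant
 * byte), so the translator simply hands EA, the target register number
 * and rB to the helper instead of open-coding a variable-length access.
 */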
346
347 #define VSX_LOAD_SCALAR_DS(name, operation) \
348 static void gen_##name(DisasContext *ctx) \
349 { \
350 TCGv EA; \
351 TCGv_i64 xth; \
352 \
353 if (unlikely(!ctx->altivec_enabled)) { \
354 gen_exception(ctx, POWERPC_EXCP_VPU); \
355 return; \
356 } \
357 xth = tcg_temp_new_i64(); \
358 get_cpu_vsrh(xth, rD(ctx->opcode) + 32); \
359 gen_set_access_type(ctx, ACCESS_INT); \
360 EA = tcg_temp_new(); \
361 gen_addr_imm_index(ctx, EA, 0x03); \
362 gen_qemu_##operation(ctx, xth, EA); \
363 set_cpu_vsrh(rD(ctx->opcode) + 32, xth); \
364 /* NOTE: cpu_vsrl is undefined */ \
365 tcg_temp_free(EA); \
366 tcg_temp_free_i64(xth); \
367 }
368
369 VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
370 VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
371
372 #define VSX_STORE_SCALAR(name, operation) \
373 static void gen_##name(DisasContext *ctx) \
374 { \
375 TCGv EA; \
376 TCGv_i64 t0; \
377 if (unlikely(!ctx->vsx_enabled)) { \
378 gen_exception(ctx, POWERPC_EXCP_VSXU); \
379 return; \
380 } \
381 t0 = tcg_temp_new_i64(); \
382 gen_set_access_type(ctx, ACCESS_INT); \
383 EA = tcg_temp_new(); \
384 gen_addr_reg_index(ctx, EA); \
385 get_cpu_vsrh(t0, xS(ctx->opcode)); \
386 gen_qemu_##operation(ctx, t0, EA); \
387 tcg_temp_free(EA); \
388 tcg_temp_free_i64(t0); \
389 }
390
391 VSX_STORE_SCALAR(stxsdx, st64_i64)
392
393 VSX_STORE_SCALAR(stxsibx, st8_i64)
394 VSX_STORE_SCALAR(stxsihx, st16_i64)
395 VSX_STORE_SCALAR(stxsiwx, st32_i64)
396 VSX_STORE_SCALAR(stxsspx, st32fs)
397
398 static void gen_stxvd2x(DisasContext *ctx)
399 {
400 TCGv EA;
401 TCGv_i64 t0;
402 if (unlikely(!ctx->vsx_enabled)) {
403 gen_exception(ctx, POWERPC_EXCP_VSXU);
404 return;
405 }
406 t0 = tcg_temp_new_i64();
407 gen_set_access_type(ctx, ACCESS_INT);
408 EA = tcg_temp_new();
409 gen_addr_reg_index(ctx, EA);
410 get_cpu_vsrh(t0, xS(ctx->opcode));
411 gen_qemu_st64_i64(ctx, t0, EA);
412 tcg_gen_addi_tl(EA, EA, 8);
413 get_cpu_vsrl(t0, xS(ctx->opcode));
414 gen_qemu_st64_i64(ctx, t0, EA);
415 tcg_temp_free(EA);
416 tcg_temp_free_i64(t0);
417 }
418
419 static void gen_stxvw4x(DisasContext *ctx)
420 {
421 TCGv EA;
422 TCGv_i64 xsh;
423 TCGv_i64 xsl;
424
425 if (unlikely(!ctx->vsx_enabled)) {
426 gen_exception(ctx, POWERPC_EXCP_VSXU);
427 return;
428 }
429 xsh = tcg_temp_new_i64();
430 xsl = tcg_temp_new_i64();
431 get_cpu_vsrh(xsh, xS(ctx->opcode));
432 get_cpu_vsrl(xsl, xS(ctx->opcode));
433 gen_set_access_type(ctx, ACCESS_INT);
434 EA = tcg_temp_new();
435 gen_addr_reg_index(ctx, EA);
436 if (ctx->le_mode) {
437 TCGv_i64 t0 = tcg_temp_new_i64();
438 TCGv_i64 t1 = tcg_temp_new_i64();
439
440 tcg_gen_shri_i64(t0, xsh, 32);
441 tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
442 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
443 tcg_gen_addi_tl(EA, EA, 8);
444 tcg_gen_shri_i64(t0, xsl, 32);
445 tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
446 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
447 tcg_temp_free_i64(t0);
448 tcg_temp_free_i64(t1);
449 } else {
450 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
451 tcg_gen_addi_tl(EA, EA, 8);
452 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
453 }
454 tcg_temp_free(EA);
455 tcg_temp_free_i64(xsh);
456 tcg_temp_free_i64(xsl);
457 }
458
459 static void gen_stxvh8x(DisasContext *ctx)
460 {
461 TCGv EA;
462 TCGv_i64 xsh;
463 TCGv_i64 xsl;
464
465 if (unlikely(!ctx->vsx_enabled)) {
466 gen_exception(ctx, POWERPC_EXCP_VSXU);
467 return;
468 }
469 xsh = tcg_temp_new_i64();
470 xsl = tcg_temp_new_i64();
471 get_cpu_vsrh(xsh, xS(ctx->opcode));
472 get_cpu_vsrl(xsl, xS(ctx->opcode));
473 gen_set_access_type(ctx, ACCESS_INT);
474 EA = tcg_temp_new();
475 gen_addr_reg_index(ctx, EA);
476 if (ctx->le_mode) {
477 TCGv_i64 outh = tcg_temp_new_i64();
478 TCGv_i64 outl = tcg_temp_new_i64();
479
480 gen_bswap16x8(outh, outl, xsh, xsl);
481 tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
482 tcg_gen_addi_tl(EA, EA, 8);
483 tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
484 tcg_temp_free_i64(outh);
485 tcg_temp_free_i64(outl);
486 } else {
487 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
488 tcg_gen_addi_tl(EA, EA, 8);
489 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
490 }
491 tcg_temp_free(EA);
492 tcg_temp_free_i64(xsh);
493 tcg_temp_free_i64(xsl);
494 }
495
496 static void gen_stxvb16x(DisasContext *ctx)
497 {
498 TCGv EA;
499 TCGv_i64 xsh;
500 TCGv_i64 xsl;
501
502 if (unlikely(!ctx->vsx_enabled)) {
503 gen_exception(ctx, POWERPC_EXCP_VSXU);
504 return;
505 }
506 xsh = tcg_temp_new_i64();
507 xsl = tcg_temp_new_i64();
508 get_cpu_vsrh(xsh, xS(ctx->opcode));
509 get_cpu_vsrl(xsl, xS(ctx->opcode));
510 gen_set_access_type(ctx, ACCESS_INT);
511 EA = tcg_temp_new();
512 gen_addr_reg_index(ctx, EA);
513 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
514 tcg_gen_addi_tl(EA, EA, 8);
515 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
516 tcg_temp_free(EA);
517 tcg_temp_free_i64(xsh);
518 tcg_temp_free_i64(xsl);
519 }
520
521 #define VSX_STORE_SCALAR_DS(name, operation) \
522 static void gen_##name(DisasContext *ctx) \
523 { \
524 TCGv EA; \
525 TCGv_i64 xth; \
526 \
527 if (unlikely(!ctx->altivec_enabled)) { \
528 gen_exception(ctx, POWERPC_EXCP_VPU); \
529 return; \
530 } \
531 xth = tcg_temp_new_i64(); \
532 get_cpu_vsrh(xth, rD(ctx->opcode) + 32); \
533 gen_set_access_type(ctx, ACCESS_INT); \
534 EA = tcg_temp_new(); \
535 gen_addr_imm_index(ctx, EA, 0x03); \
536 gen_qemu_##operation(ctx, xth, EA); \
537 /* NOTE: cpu_vsrl is undefined */ \
538 tcg_temp_free(EA); \
539 tcg_temp_free_i64(xth); \
540 }
541
542 VSX_STORE_SCALAR_DS(stxsd, st64_i64)
543 VSX_STORE_SCALAR_DS(stxssp, st32fs)
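/*
 * lxsd/lxssp and stxsd/stxssp are DS-form and can only name VSR32-63 (the
 * rD field plus 32), i.e. the VMX registers -- hence the altivec_enabled
 * check rather than the VSX one.  The 0x03 mask passed to
 * gen_addr_imm_index() clears the two low bits that DS-form borrows from
 * the displacement field.
 */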
544
545 static void gen_mfvsrwz(DisasContext *ctx)
546 {
547 if (xS(ctx->opcode) < 32) {
548 if (unlikely(!ctx->fpu_enabled)) {
549 gen_exception(ctx, POWERPC_EXCP_FPU);
550 return;
551 }
552 } else {
553 if (unlikely(!ctx->altivec_enabled)) {
554 gen_exception(ctx, POWERPC_EXCP_VPU);
555 return;
556 }
557 }
558 TCGv_i64 tmp = tcg_temp_new_i64();
559 TCGv_i64 xsh = tcg_temp_new_i64();
560 get_cpu_vsrh(xsh, xS(ctx->opcode));
561 tcg_gen_ext32u_i64(tmp, xsh);
562 tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
563 tcg_temp_free_i64(tmp);
564 tcg_temp_free_i64(xsh);
565 }
566
567 static void gen_mtvsrwa(DisasContext *ctx)
568 {
569 if (xS(ctx->opcode) < 32) {
570 if (unlikely(!ctx->fpu_enabled)) {
571 gen_exception(ctx, POWERPC_EXCP_FPU);
572 return;
573 }
574 } else {
575 if (unlikely(!ctx->altivec_enabled)) {
576 gen_exception(ctx, POWERPC_EXCP_VPU);
577 return;
578 }
579 }
580 TCGv_i64 tmp = tcg_temp_new_i64();
581 TCGv_i64 xsh = tcg_temp_new_i64();
582 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
583 tcg_gen_ext32s_i64(xsh, tmp);
584 set_cpu_vsrh(xT(ctx->opcode), xsh);
585 tcg_temp_free_i64(tmp);
586 tcg_temp_free_i64(xsh);
587 }
588
589 static void gen_mtvsrwz(DisasContext *ctx)
590 {
591 if (xS(ctx->opcode) < 32) {
592 if (unlikely(!ctx->fpu_enabled)) {
593 gen_exception(ctx, POWERPC_EXCP_FPU);
594 return;
595 }
596 } else {
597 if (unlikely(!ctx->altivec_enabled)) {
598 gen_exception(ctx, POWERPC_EXCP_VPU);
599 return;
600 }
601 }
602 TCGv_i64 tmp = tcg_temp_new_i64();
603 TCGv_i64 xsh = tcg_temp_new_i64();
604 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
605 tcg_gen_ext32u_i64(xsh, tmp);
606 set_cpu_vsrh(xT(ctx->opcode), xsh);
607 tcg_temp_free_i64(tmp);
608 tcg_temp_free_i64(xsh);
609 }
610
611 #if defined(TARGET_PPC64)
612 static void gen_mfvsrd(DisasContext *ctx)
613 {
614 TCGv_i64 t0;
615 if (xS(ctx->opcode) < 32) {
616 if (unlikely(!ctx->fpu_enabled)) {
617 gen_exception(ctx, POWERPC_EXCP_FPU);
618 return;
619 }
620 } else {
621 if (unlikely(!ctx->altivec_enabled)) {
622 gen_exception(ctx, POWERPC_EXCP_VPU);
623 return;
624 }
625 }
626 t0 = tcg_temp_new_i64();
627 get_cpu_vsrh(t0, xS(ctx->opcode));
628 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
629 tcg_temp_free_i64(t0);
630 }
631
632 static void gen_mtvsrd(DisasContext *ctx)
633 {
634 TCGv_i64 t0;
635 if (xS(ctx->opcode) < 32) {
636 if (unlikely(!ctx->fpu_enabled)) {
637 gen_exception(ctx, POWERPC_EXCP_FPU);
638 return;
639 }
640 } else {
641 if (unlikely(!ctx->altivec_enabled)) {
642 gen_exception(ctx, POWERPC_EXCP_VPU);
643 return;
644 }
645 }
646 t0 = tcg_temp_new_i64();
647 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
648 set_cpu_vsrh(xT(ctx->opcode), t0);
649 tcg_temp_free_i64(t0);
650 }
651
652 static void gen_mfvsrld(DisasContext *ctx)
653 {
654 TCGv_i64 t0;
655 if (xS(ctx->opcode) < 32) {
656 if (unlikely(!ctx->vsx_enabled)) {
657 gen_exception(ctx, POWERPC_EXCP_VSXU);
658 return;
659 }
660 } else {
661 if (unlikely(!ctx->altivec_enabled)) {
662 gen_exception(ctx, POWERPC_EXCP_VPU);
663 return;
664 }
665 }
666 t0 = tcg_temp_new_i64();
667 get_cpu_vsrl(t0, xS(ctx->opcode));
668 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
669 tcg_temp_free_i64(t0);
670 }
671
672 static void gen_mtvsrdd(DisasContext *ctx)
673 {
674 TCGv_i64 t0;
675 if (xT(ctx->opcode) < 32) {
676 if (unlikely(!ctx->vsx_enabled)) {
677 gen_exception(ctx, POWERPC_EXCP_VSXU);
678 return;
679 }
680 } else {
681 if (unlikely(!ctx->altivec_enabled)) {
682 gen_exception(ctx, POWERPC_EXCP_VPU);
683 return;
684 }
685 }
686
687 t0 = tcg_temp_new_i64();
688 if (!rA(ctx->opcode)) {
689 tcg_gen_movi_i64(t0, 0);
690 } else {
691 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
692 }
693 set_cpu_vsrh(xT(ctx->opcode), t0);
694
695 tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
696 set_cpu_vsrl(xT(ctx->opcode), t0);
697 tcg_temp_free_i64(t0);
698 }
699
700 static void gen_mtvsrws(DisasContext *ctx)
701 {
702 TCGv_i64 t0;
703 if (xT(ctx->opcode) < 32) {
704 if (unlikely(!ctx->vsx_enabled)) {
705 gen_exception(ctx, POWERPC_EXCP_VSXU);
706 return;
707 }
708 } else {
709 if (unlikely(!ctx->altivec_enabled)) {
710 gen_exception(ctx, POWERPC_EXCP_VPU);
711 return;
712 }
713 }
714
715 t0 = tcg_temp_new_i64();
716 tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
717 cpu_gpr[rA(ctx->opcode)], 32, 32);
718 set_cpu_vsrl(xT(ctx->opcode), t0);
719 set_cpu_vsrh(xT(ctx->opcode), t0);
720 tcg_temp_free_i64(t0);
721 }
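/*
 * mtvsrws splats the low word of rA across all four word elements: the
 * deposit builds a dword containing that word twice, and the same value
 * is written to both halves.  E.g. rA = 0xAAAABBBBCCCCDDDD gives
 * VSR[XT] = 0xCCCCDDDD_CCCCDDDD_CCCCDDDD_CCCCDDDD.
 */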
722
723 #endif
724
725 static void gen_xxpermdi(DisasContext *ctx)
726 {
727 TCGv_i64 xh, xl;
728
729 if (unlikely(!ctx->vsx_enabled)) {
730 gen_exception(ctx, POWERPC_EXCP_VSXU);
731 return;
732 }
733
734 xh = tcg_temp_new_i64();
735 xl = tcg_temp_new_i64();
736
737 if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
738 (xT(ctx->opcode) == xB(ctx->opcode)))) {
739 if ((DM(ctx->opcode) & 2) == 0) {
740 get_cpu_vsrh(xh, xA(ctx->opcode));
741 } else {
742 get_cpu_vsrl(xh, xA(ctx->opcode));
743 }
744 if ((DM(ctx->opcode) & 1) == 0) {
745 get_cpu_vsrh(xl, xB(ctx->opcode));
746 } else {
747 get_cpu_vsrl(xl, xB(ctx->opcode));
748 }
749
750 set_cpu_vsrh(xT(ctx->opcode), xh);
751 set_cpu_vsrl(xT(ctx->opcode), xl);
752 } else {
753 if ((DM(ctx->opcode) & 2) == 0) {
754 get_cpu_vsrh(xh, xA(ctx->opcode));
755 set_cpu_vsrh(xT(ctx->opcode), xh);
756 } else {
757 get_cpu_vsrl(xh, xA(ctx->opcode));
758 set_cpu_vsrh(xT(ctx->opcode), xh);
759 }
760 if ((DM(ctx->opcode) & 1) == 0) {
761 get_cpu_vsrh(xl, xB(ctx->opcode));
762 set_cpu_vsrl(xT(ctx->opcode), xl);
763 } else {
764 get_cpu_vsrl(xl, xB(ctx->opcode));
765 set_cpu_vsrl(xT(ctx->opcode), xl);
766 }
767 }
768 tcg_temp_free_i64(xh);
769 tcg_temp_free_i64(xl);
770 }
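/*
 * xxpermdi: DM bit 0 (mask 2) selects which dword of xA becomes the
 * target's high dword, DM bit 1 (mask 1) selects xB's dword for the low:
 *
 *     DM = 0b00 -> { A.hi, B.hi }     DM = 0b01 -> { A.hi, B.lo }
 *     DM = 0b10 -> { A.lo, B.hi }     DM = 0b11 -> { A.lo, B.lo }
 *
 * The first branch handles xT aliasing xA or xB by reading both source
 * dwords into temporaries before any write-back can clobber them.
 */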
771
772 #define OP_ABS 1
773 #define OP_NABS 2
774 #define OP_NEG 3
775 #define OP_CPSGN 4
776 #define SGN_MASK_DP 0x8000000000000000ull
777 #define SGN_MASK_SP 0x8000000080000000ull
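/*
 * SGN_MASK_DP covers the sign bit of one double; SGN_MASK_SP carries the
 * sign bit of both packed singles in a dword, so the same andc/or/xor
 * sequences below work element-wise for the single-precision forms:
 * clear the sign for abs, set it for nabs, flip it for neg, and copy it
 * from xA for cpsgn.
 */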
778
779 #define VSX_SCALAR_MOVE(name, op, sgn_mask) \
780 static void glue(gen_, name)(DisasContext * ctx) \
781 { \
782 TCGv_i64 xb, sgm; \
783 if (unlikely(!ctx->vsx_enabled)) { \
784 gen_exception(ctx, POWERPC_EXCP_VSXU); \
785 return; \
786 } \
787 xb = tcg_temp_new_i64(); \
788 sgm = tcg_temp_new_i64(); \
789 get_cpu_vsrh(xb, xB(ctx->opcode)); \
790 tcg_gen_movi_i64(sgm, sgn_mask); \
791 switch (op) { \
792 case OP_ABS: { \
793 tcg_gen_andc_i64(xb, xb, sgm); \
794 break; \
795 } \
796 case OP_NABS: { \
797 tcg_gen_or_i64(xb, xb, sgm); \
798 break; \
799 } \
800 case OP_NEG: { \
801 tcg_gen_xor_i64(xb, xb, sgm); \
802 break; \
803 } \
804 case OP_CPSGN: { \
805 TCGv_i64 xa = tcg_temp_new_i64(); \
806 get_cpu_vsrh(xa, xA(ctx->opcode)); \
807 tcg_gen_and_i64(xa, xa, sgm); \
808 tcg_gen_andc_i64(xb, xb, sgm); \
809 tcg_gen_or_i64(xb, xb, xa); \
810 tcg_temp_free_i64(xa); \
811 break; \
812 } \
813 } \
814 set_cpu_vsrh(xT(ctx->opcode), xb); \
815 tcg_temp_free_i64(xb); \
816 tcg_temp_free_i64(sgm); \
817 }
818
819 VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
820 VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
821 VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
822 VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
823
824 #define VSX_SCALAR_MOVE_QP(name, op, sgn_mask) \
825 static void glue(gen_, name)(DisasContext *ctx) \
826 { \
827 int xa; \
828 int xt = rD(ctx->opcode) + 32; \
829 int xb = rB(ctx->opcode) + 32; \
830 TCGv_i64 xah, xbh, xbl, sgm, tmp; \
831 \
832 if (unlikely(!ctx->vsx_enabled)) { \
833 gen_exception(ctx, POWERPC_EXCP_VSXU); \
834 return; \
835 } \
836 xbh = tcg_temp_new_i64(); \
837 xbl = tcg_temp_new_i64(); \
838 sgm = tcg_temp_new_i64(); \
839 tmp = tcg_temp_new_i64(); \
840 get_cpu_vsrh(xbh, xb); \
841 get_cpu_vsrl(xbl, xb); \
842 tcg_gen_movi_i64(sgm, sgn_mask); \
843 switch (op) { \
844 case OP_ABS: \
845 tcg_gen_andc_i64(xbh, xbh, sgm); \
846 break; \
847 case OP_NABS: \
848 tcg_gen_or_i64(xbh, xbh, sgm); \
849 break; \
850 case OP_NEG: \
851 tcg_gen_xor_i64(xbh, xbh, sgm); \
852 break; \
853 case OP_CPSGN: \
854 xah = tcg_temp_new_i64(); \
855 xa = rA(ctx->opcode) + 32; \
856 get_cpu_vsrh(tmp, xa); \
857 tcg_gen_and_i64(xah, tmp, sgm); \
858 tcg_gen_andc_i64(xbh, xbh, sgm); \
859 tcg_gen_or_i64(xbh, xbh, xah); \
860 tcg_temp_free_i64(xah); \
861 break; \
862 } \
863 set_cpu_vsrh(xt, xbh); \
864 set_cpu_vsrl(xt, xbl); \
865 tcg_temp_free_i64(xbl); \
866 tcg_temp_free_i64(xbh); \
867 tcg_temp_free_i64(sgm); \
868 tcg_temp_free_i64(tmp); \
869 }
870
871 VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
872 VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
873 VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
874 VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
875
876 #define VSX_VECTOR_MOVE(name, op, sgn_mask) \
877 static void glue(gen_, name)(DisasContext * ctx) \
878 { \
879 TCGv_i64 xbh, xbl, sgm; \
880 if (unlikely(!ctx->vsx_enabled)) { \
881 gen_exception(ctx, POWERPC_EXCP_VSXU); \
882 return; \
883 } \
884 xbh = tcg_temp_new_i64(); \
885 xbl = tcg_temp_new_i64(); \
886 sgm = tcg_temp_new_i64(); \
887 get_cpu_vsrh(xbh, xB(ctx->opcode)); \
888 get_cpu_vsrl(xbl, xB(ctx->opcode)); \
889 tcg_gen_movi_i64(sgm, sgn_mask); \
890 switch (op) { \
891 case OP_ABS: { \
892 tcg_gen_andc_i64(xbh, xbh, sgm); \
893 tcg_gen_andc_i64(xbl, xbl, sgm); \
894 break; \
895 } \
896 case OP_NABS: { \
897 tcg_gen_or_i64(xbh, xbh, sgm); \
898 tcg_gen_or_i64(xbl, xbl, sgm); \
899 break; \
900 } \
901 case OP_NEG: { \
902 tcg_gen_xor_i64(xbh, xbh, sgm); \
903 tcg_gen_xor_i64(xbl, xbl, sgm); \
904 break; \
905 } \
906 case OP_CPSGN: { \
907 TCGv_i64 xah = tcg_temp_new_i64(); \
908 TCGv_i64 xal = tcg_temp_new_i64(); \
909 get_cpu_vsrh(xah, xA(ctx->opcode)); \
910 get_cpu_vsrl(xal, xA(ctx->opcode)); \
911 tcg_gen_and_i64(xah, xah, sgm); \
912 tcg_gen_and_i64(xal, xal, sgm); \
913 tcg_gen_andc_i64(xbh, xbh, sgm); \
914 tcg_gen_andc_i64(xbl, xbl, sgm); \
915 tcg_gen_or_i64(xbh, xbh, xah); \
916 tcg_gen_or_i64(xbl, xbl, xal); \
917 tcg_temp_free_i64(xah); \
918 tcg_temp_free_i64(xal); \
919 break; \
920 } \
921 } \
922 set_cpu_vsrh(xT(ctx->opcode), xbh); \
923 set_cpu_vsrl(xT(ctx->opcode), xbl); \
924 tcg_temp_free_i64(xbh); \
925 tcg_temp_free_i64(xbl); \
926 tcg_temp_free_i64(sgm); \
927 }
928
929 VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
930 VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
931 VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
932 VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
933 VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
934 VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
935 VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
936 VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
937
938 #define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
939 static void gen_##name(DisasContext * ctx) \
940 { \
941 TCGv_i32 opc; \
942 if (unlikely(!ctx->vsx_enabled)) { \
943 gen_exception(ctx, POWERPC_EXCP_VSXU); \
944 return; \
945 } \
946 opc = tcg_const_i32(ctx->opcode); \
947 gen_helper_##name(cpu_env, opc); \
948 tcg_temp_free_i32(opc); \
949 }
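/*
 * GEN_VSX_HELPER_2 emits only a thin shim: the arithmetic lives in an
 * out-of-line helper (gen_helper_<name>, implemented in fpu_helper.c)
 * which decodes its operands from the raw opcode word.  The op1/op2/inval
 * arguments are unused here; they document the opcode-table entries
 * generated elsewhere in translate.c.  GEN_VSX_HELPER_XT_XB_ENV below
 * instead passes xB through a TCG temporary and writes back only the
 * target's high dword.
 */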
950
951 #define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
952 static void gen_##name(DisasContext * ctx) \
953 { \
954 TCGv_i64 t0; \
955 TCGv_i64 t1; \
956 if (unlikely(!ctx->vsx_enabled)) { \
957 gen_exception(ctx, POWERPC_EXCP_VSXU); \
958 return; \
959 } \
960 t0 = tcg_temp_new_i64(); \
961 t1 = tcg_temp_new_i64(); \
962 get_cpu_vsrh(t0, xB(ctx->opcode)); \
963 gen_helper_##name(t1, cpu_env, t0); \
964 set_cpu_vsrh(xT(ctx->opcode), t1); \
965 tcg_temp_free_i64(t0); \
966 tcg_temp_free_i64(t1); \
967 }
968
969 GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
970 GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
971 GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
972 GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
973 GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
974 GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
975 GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
976 GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
977 GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
978 GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
979 GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
980 GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
981 GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
982 GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
983 GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
984 GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
985 GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
986 GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
987 GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
988 GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
989 GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
990 GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
991 GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
992 GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
993 GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
994 GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
995 GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
996 GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
997 GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
998 GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
999 GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
1000 GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
1001 GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
1002 GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
1003 GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
1004 GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
1005 GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
1006 GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
1007 GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
1008 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
1009 GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
1010 GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
1011 GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
1012 GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
1013 GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
1014 GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
1015 GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
1016 GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
1017 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
1018 GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1019 GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1020 GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1021 GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1022 GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
1023 GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
1024 GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1025 GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1026 GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1027 GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1028 GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1029 GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
1030 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
1031
1032 GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1033 GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1034 GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
1035 GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
1036
1037 GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
1038 GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
1039 GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
1040 GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
1041 GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
1042 GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
1043 GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
1044 GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
1045 GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
1046 GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
1047 GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
1048 GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
1049 GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
1050 GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
1051 GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
1052 GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
1053 GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
1054 GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
1055 GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
1056 GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)
1057
1058 GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
1059 GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
1060 GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
1061 GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
1062 GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
1063 GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
1064 GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
1065 GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
1066 GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
1067 GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
1068 GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
1069 GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
1070 GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
1071 GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
1072 GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
1073 GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
1074 GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
1075 GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
1076 GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
1077 GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
1078 GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
1079 GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
1080 GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
1081 GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
1082 GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
1083 GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
1084 GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
1085 GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
1086 GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
1087 GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
1088 GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
1089 GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
1090 GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
1091 GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
1092 GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
1093 GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
1094 GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
1095
1096 GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
1097 GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
1098 GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
1099 GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
1100 GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
1101 GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
1102 GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
1103 GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
1104 GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
1105 GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
1106 GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
1107 GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
1108 GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
1109 GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
1110 GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
1111 GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
1112 GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
1113 GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
1114 GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
1115 GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
1116 GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
1117 GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
1118 GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
1119 GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
1120 GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
1121 GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
1122 GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
1123 GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
1124 GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
1125 GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
1126 GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
1127 GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
1128 GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
1129 GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
1130 GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
1131 GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
1132 GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
1133 GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
1134 GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
1135 GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
1136 GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
1137 GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
1138 GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
1139
1140 static void gen_xxbrd(DisasContext *ctx)
1141 {
1142 TCGv_i64 xth;
1143 TCGv_i64 xtl;
1144 TCGv_i64 xbh;
1145 TCGv_i64 xbl;
1146
1147 if (unlikely(!ctx->vsx_enabled)) {
1148 gen_exception(ctx, POWERPC_EXCP_VSXU);
1149 return;
1150 }
1151 xth = tcg_temp_new_i64();
1152 xtl = tcg_temp_new_i64();
1153 xbh = tcg_temp_new_i64();
1154 xbl = tcg_temp_new_i64();
1155 get_cpu_vsrh(xbh, xB(ctx->opcode));
1156 get_cpu_vsrl(xbl, xB(ctx->opcode));
1157
1158 tcg_gen_bswap64_i64(xth, xbh);
1159 tcg_gen_bswap64_i64(xtl, xbl);
1160 set_cpu_vsrh(xT(ctx->opcode), xth);
1161 set_cpu_vsrl(xT(ctx->opcode), xtl);
1162
1163 tcg_temp_free_i64(xth);
1164 tcg_temp_free_i64(xtl);
1165 tcg_temp_free_i64(xbh);
1166 tcg_temp_free_i64(xbl);
1167 }
1168
1169 static void gen_xxbrh(DisasContext *ctx)
1170 {
1171 TCGv_i64 xth;
1172 TCGv_i64 xtl;
1173 TCGv_i64 xbh;
1174 TCGv_i64 xbl;
1175
1176 if (unlikely(!ctx->vsx_enabled)) {
1177 gen_exception(ctx, POWERPC_EXCP_VSXU);
1178 return;
1179 }
1180 xth = tcg_temp_new_i64();
1181 xtl = tcg_temp_new_i64();
1182 xbh = tcg_temp_new_i64();
1183 xbl = tcg_temp_new_i64();
1184 get_cpu_vsrh(xbh, xB(ctx->opcode));
1185 get_cpu_vsrl(xbl, xB(ctx->opcode));
1186
1187 gen_bswap16x8(xth, xtl, xbh, xbl);
1188 set_cpu_vsrh(xT(ctx->opcode), xth);
1189 set_cpu_vsrl(xT(ctx->opcode), xtl);
1190
1191 tcg_temp_free_i64(xth);
1192 tcg_temp_free_i64(xtl);
1193 tcg_temp_free_i64(xbh);
1194 tcg_temp_free_i64(xbl);
1195 }
1196
1197 static void gen_xxbrq(DisasContext *ctx)
1198 {
1199 TCGv_i64 xth;
1200 TCGv_i64 xtl;
1201 TCGv_i64 xbh;
1202 TCGv_i64 xbl;
1203 TCGv_i64 t0;
1204
1205 if (unlikely(!ctx->vsx_enabled)) {
1206 gen_exception(ctx, POWERPC_EXCP_VSXU);
1207 return;
1208 }
1209 xth = tcg_temp_new_i64();
1210 xtl = tcg_temp_new_i64();
1211 xbh = tcg_temp_new_i64();
1212 xbl = tcg_temp_new_i64();
1213 get_cpu_vsrh(xbh, xB(ctx->opcode));
1214 get_cpu_vsrl(xbl, xB(ctx->opcode));
1215 t0 = tcg_temp_new_i64();
1216
1217 tcg_gen_bswap64_i64(t0, xbl);
1218 tcg_gen_bswap64_i64(xtl, xbh);
1219 set_cpu_vsrl(xT(ctx->opcode), xtl);
1220 tcg_gen_mov_i64(xth, t0);
1221 set_cpu_vsrh(xT(ctx->opcode), xth);
1222
1223 tcg_temp_free_i64(t0);
1224 tcg_temp_free_i64(xth);
1225 tcg_temp_free_i64(xtl);
1226 tcg_temp_free_i64(xbh);
1227 tcg_temp_free_i64(xbl);
1228 }
1229
1230 static void gen_xxbrw(DisasContext *ctx)
1231 {
1232 TCGv_i64 xth;
1233 TCGv_i64 xtl;
1234 TCGv_i64 xbh;
1235 TCGv_i64 xbl;
1236
1237 if (unlikely(!ctx->vsx_enabled)) {
1238 gen_exception(ctx, POWERPC_EXCP_VSXU);
1239 return;
1240 }
1241 xth = tcg_temp_new_i64();
1242 xtl = tcg_temp_new_i64();
1243 xbh = tcg_temp_new_i64();
1244 xbl = tcg_temp_new_i64();
1245 get_cpu_vsrh(xbh, xB(ctx->opcode));
1246 get_cpu_vsrl(xbl, xB(ctx->opcode));
1247
1248 gen_bswap32x4(xth, xtl, xbh, xbl);
1249 set_cpu_vsrh(xT(ctx->opcode), xth);
1250 set_cpu_vsrl(xT(ctx->opcode), xtl);
1251
1252 tcg_temp_free_i64(xth);
1253 tcg_temp_free_i64(xtl);
1254 tcg_temp_free_i64(xbh);
1255 tcg_temp_free_i64(xbl);
1256 }
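/*
 * The xxbr* family reverses bytes within each element: xxbrd per dword,
 * xxbrh per halfword, xxbrw per word, and xxbrq across the full 128 bits
 * (which is why gen_xxbrq cross-swaps the two dwords through t0).
 */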
1257
1258 #define VSX_LOGICAL(name, tcg_op) \
1259 static void glue(gen_, name)(DisasContext * ctx) \
1260 { \
1261 TCGv_i64 t0; \
1262 TCGv_i64 t1; \
1263 TCGv_i64 t2; \
1264 if (unlikely(!ctx->vsx_enabled)) { \
1265 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1266 return; \
1267 } \
1268 t0 = tcg_temp_new_i64(); \
1269 t1 = tcg_temp_new_i64(); \
1270 t2 = tcg_temp_new_i64(); \
1271 get_cpu_vsrh(t0, xA(ctx->opcode)); \
1272 get_cpu_vsrh(t1, xB(ctx->opcode)); \
1273 tcg_op(t2, t0, t1); \
1274 set_cpu_vsrh(xT(ctx->opcode), t2); \
1275 get_cpu_vsrl(t0, xA(ctx->opcode)); \
1276 get_cpu_vsrl(t1, xB(ctx->opcode)); \
1277 tcg_op(t2, t0, t1); \
1278 set_cpu_vsrl(xT(ctx->opcode), t2); \
1279 tcg_temp_free_i64(t0); \
1280 tcg_temp_free_i64(t1); \
1281 tcg_temp_free_i64(t2); \
1282 }
1283
1284 VSX_LOGICAL(xxland, tcg_gen_and_i64)
1285 VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
1286 VSX_LOGICAL(xxlor, tcg_gen_or_i64)
1287 VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
1288 VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
1289 VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
1290 VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
1291 VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)
1292
1293 #define VSX_XXMRG(name, high) \
1294 static void glue(gen_, name)(DisasContext * ctx) \
1295 { \
1296 TCGv_i64 a0, a1, b0, b1, tmp; \
1297 if (unlikely(!ctx->vsx_enabled)) { \
1298 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1299 return; \
1300 } \
1301 a0 = tcg_temp_new_i64(); \
1302 a1 = tcg_temp_new_i64(); \
1303 b0 = tcg_temp_new_i64(); \
1304 b1 = tcg_temp_new_i64(); \
1305 tmp = tcg_temp_new_i64(); \
1306 if (high) { \
1307 get_cpu_vsrh(a0, xA(ctx->opcode)); \
1308 get_cpu_vsrh(a1, xA(ctx->opcode)); \
1309 get_cpu_vsrh(b0, xB(ctx->opcode)); \
1310 get_cpu_vsrh(b1, xB(ctx->opcode)); \
1311 } else { \
1312 get_cpu_vsrl(a0, xA(ctx->opcode)); \
1313 get_cpu_vsrl(a1, xA(ctx->opcode)); \
1314 get_cpu_vsrl(b0, xB(ctx->opcode)); \
1315 get_cpu_vsrl(b1, xB(ctx->opcode)); \
1316 } \
1317 tcg_gen_shri_i64(a0, a0, 32); \
1318 tcg_gen_shri_i64(b0, b0, 32); \
1319 tcg_gen_deposit_i64(tmp, b0, a0, 32, 32); \
1320 set_cpu_vsrh(xT(ctx->opcode), tmp); \
1321 tcg_gen_deposit_i64(tmp, b1, a1, 32, 32); \
1322 set_cpu_vsrl(xT(ctx->opcode), tmp); \
1323 tcg_temp_free_i64(a0); \
1324 tcg_temp_free_i64(a1); \
1325 tcg_temp_free_i64(b0); \
1326 tcg_temp_free_i64(b1); \
1327 tcg_temp_free_i64(tmp); \
1328 }
1329
1330 VSX_XXMRG(xxmrghw, 1)
1331 VSX_XXMRG(xxmrglw, 0)
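/*
 * Merge-word example: with word elements A = { a0, a1, a2, a3 } and
 * B = { b0, b1, b2, b3 }, xxmrghw produces { a0, b0, a1, b1 } and
 * xxmrglw produces { a2, b2, a3, b3 }.  In the macro, a0/b0 are shifted
 * down to isolate the upper word of each source dword, and each deposit
 * stacks one word of A above the matching word of B.
 */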
1332
1333 static void gen_xxsel(DisasContext * ctx)
1334 {
1335 TCGv_i64 a, b, c, tmp;
1336 if (unlikely(!ctx->vsx_enabled)) {
1337 gen_exception(ctx, POWERPC_EXCP_VSXU);
1338 return;
1339 }
1340 a = tcg_temp_new_i64();
1341 b = tcg_temp_new_i64();
1342 c = tcg_temp_new_i64();
1343 tmp = tcg_temp_new_i64();
1344
1345 get_cpu_vsrh(a, xA(ctx->opcode));
1346 get_cpu_vsrh(b, xB(ctx->opcode));
1347 get_cpu_vsrh(c, xC(ctx->opcode));
1348
1349 tcg_gen_and_i64(b, b, c);
1350 tcg_gen_andc_i64(a, a, c);
1351 tcg_gen_or_i64(tmp, a, b);
1352 set_cpu_vsrh(xT(ctx->opcode), tmp);
1353
1354 get_cpu_vsrl(a, xA(ctx->opcode));
1355 get_cpu_vsrl(b, xB(ctx->opcode));
1356 get_cpu_vsrl(c, xC(ctx->opcode));
1357
1358 tcg_gen_and_i64(b, b, c);
1359 tcg_gen_andc_i64(a, a, c);
1360 tcg_gen_or_i64(tmp, a, b);
1361 set_cpu_vsrl(xT(ctx->opcode), tmp);
1362
1363 tcg_temp_free_i64(a);
1364 tcg_temp_free_i64(b);
1365 tcg_temp_free_i64(c);
1366 tcg_temp_free_i64(tmp);
1367 }
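/*
 * xxsel is a pure bitwise select, computed dword by dword:
 *
 *     xT = (xA & ~xC) | (xB & xC)
 *
 * i.e. each result bit comes from xB where the mask xC is 1, else from xA.
 */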
1368
1369 static void gen_xxspltw(DisasContext *ctx)
1370 {
1371 TCGv_i64 b, b2;
1372 TCGv_i64 vsr;
1373
1374 if (unlikely(!ctx->vsx_enabled)) {
1375 gen_exception(ctx, POWERPC_EXCP_VSXU);
1376 return;
1377 }
1378
1379 vsr = tcg_temp_new_i64();
1380 if (UIM(ctx->opcode) & 2) {
1381 get_cpu_vsrl(vsr, xB(ctx->opcode));
1382 } else {
1383 get_cpu_vsrh(vsr, xB(ctx->opcode));
1384 }
1385
1386 b = tcg_temp_new_i64();
1387 b2 = tcg_temp_new_i64();
1388
1389 if (UIM(ctx->opcode) & 1) {
1390 tcg_gen_ext32u_i64(b, vsr);
1391 } else {
1392 tcg_gen_shri_i64(b, vsr, 32);
1393 }
1394
1395 tcg_gen_shli_i64(b2, b, 32);
1396 tcg_gen_or_i64(vsr, b, b2);
1397 set_cpu_vsrh(xT(ctx->opcode), vsr);
1398 set_cpu_vsrl(xT(ctx->opcode), vsr);
1399
1400 tcg_temp_free_i64(vsr);
1401 tcg_temp_free_i64(b);
1402 tcg_temp_free_i64(b2);
1403 }
1404
1405 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
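/*
 * pattern() broadcasts a byte across all eight byte lanes of a uint64_t:
 * ~(uint64_t)0 / 0xff == 0x0101010101010101, so for example
 * pattern(0xAB) == 0xAB * 0x0101010101010101 == 0xABABABABABABABAB.
 */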
1406
1407 static void gen_xxspltib(DisasContext *ctx)
1408 {
1409 unsigned char uim8 = IMM8(ctx->opcode);
1410 TCGv_i64 vsr;
1411 if (xS(ctx->opcode) < 32) {
1412 if (unlikely(!ctx->vsx_enabled)) {
1413 gen_exception(ctx, POWERPC_EXCP_VSXU);
1414 return;
1415 }
1416 } else {
1417 if (unlikely(!ctx->altivec_enabled)) {
1418 gen_exception(ctx, POWERPC_EXCP_VPU);
1419 return;
1420 }
1421 }
1422 vsr = tcg_temp_new_i64();
1423 tcg_gen_movi_i64(vsr, pattern(uim8));
1424 set_cpu_vsrh(xT(ctx->opcode), vsr);
1425 set_cpu_vsrl(xT(ctx->opcode), vsr);
1426 tcg_temp_free_i64(vsr);
1427 }
1428
1429 static void gen_xxsldwi(DisasContext *ctx)
1430 {
1431 TCGv_i64 xth, xtl;
1432 if (unlikely(!ctx->vsx_enabled)) {
1433 gen_exception(ctx, POWERPC_EXCP_VSXU);
1434 return;
1435 }
1436 xth = tcg_temp_new_i64();
1437 xtl = tcg_temp_new_i64();
1438
1439 switch (SHW(ctx->opcode)) {
1440 case 0: {
1441 get_cpu_vsrh(xth, xA(ctx->opcode));
1442 get_cpu_vsrl(xtl, xA(ctx->opcode));
1443 break;
1444 }
1445 case 1: {
1446 TCGv_i64 t0 = tcg_temp_new_i64();
1447 get_cpu_vsrh(xth, xA(ctx->opcode));
1448 tcg_gen_shli_i64(xth, xth, 32);
1449 get_cpu_vsrl(t0, xA(ctx->opcode));
1450 tcg_gen_shri_i64(t0, t0, 32);
1451 tcg_gen_or_i64(xth, xth, t0);
1452 get_cpu_vsrl(xtl, xA(ctx->opcode));
1453 tcg_gen_shli_i64(xtl, xtl, 32);
1454 get_cpu_vsrh(t0, xB(ctx->opcode));
1455 tcg_gen_shri_i64(t0, t0, 32);
1456 tcg_gen_or_i64(xtl, xtl, t0);
1457 tcg_temp_free_i64(t0);
1458 break;
1459 }
1460 case 2: {
1461 get_cpu_vsrl(xth, xA(ctx->opcode));
1462 get_cpu_vsrh(xtl, xB(ctx->opcode));
1463 break;
1464 }
1465 case 3: {
1466 TCGv_i64 t0 = tcg_temp_new_i64();
1467 get_cpu_vsrl(xth, xA(ctx->opcode));
1468 tcg_gen_shli_i64(xth, xth, 32);
1469 get_cpu_vsrh(t0, xB(ctx->opcode));
1470 tcg_gen_shri_i64(t0, t0, 32);
1471 tcg_gen_or_i64(xth, xth, t0);
1472 get_cpu_vsrh(xtl, xB(ctx->opcode));
1473 tcg_gen_shli_i64(xtl, xtl, 32);
1474 get_cpu_vsrl(t0, xB(ctx->opcode));
1475 tcg_gen_shri_i64(t0, t0, 32);
1476 tcg_gen_or_i64(xtl, xtl, t0);
1477 tcg_temp_free_i64(t0);
1478 break;
1479 }
1480 }
1481
1482 set_cpu_vsrh(xT(ctx->opcode), xth);
1483 set_cpu_vsrl(xT(ctx->opcode), xtl);
1484
1485 tcg_temp_free_i64(xth);
1486 tcg_temp_free_i64(xtl);
1487 }
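/*
 * xxsldwi shifts the 256-bit concatenation xA:xB left by SHW words and
 * keeps the high 128 bits.  Cases 0 and 2 are plain dword selects; the
 * odd shift counts splice adjacent dwords with a shli/shri/or triple,
 * e.g. for SHW=1 the result's high dword is (A.hi << 32) | (A.lo >> 32).
 */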
1488
1489 #define VSX_EXTRACT_INSERT(name) \
1490 static void gen_##name(DisasContext *ctx) \
1491 { \
1492 TCGv xt, xb; \
1493 TCGv_i32 t0; \
1494 TCGv_i64 t1; \
1495 uint8_t uimm = UIMM4(ctx->opcode); \
1496 \
1497 if (unlikely(!ctx->vsx_enabled)) { \
1498 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1499 return; \
1500 } \
1501 xt = tcg_const_tl(xT(ctx->opcode)); \
1502 xb = tcg_const_tl(xB(ctx->opcode)); \
1503 t0 = tcg_temp_new_i32(); \
1504 t1 = tcg_temp_new_i64(); \
1505 /* uimm > 15 is out of bounds; for \
1506 * uimm > 12 the helper mirrors hardware behaviour \
1507 */ \
1508 if (uimm > 15) { \
1509 tcg_gen_movi_i64(t1, 0); \
1510 set_cpu_vsrh(xT(ctx->opcode), t1); \
1511 set_cpu_vsrl(xT(ctx->opcode), t1); \
1512 return; \
1513 } \
1514 tcg_gen_movi_i32(t0, uimm); \
1515 gen_helper_##name(cpu_env, xt, xb, t0); \
1516 tcg_temp_free(xb); \
1517 tcg_temp_free(xt); \
1518 tcg_temp_free_i32(t0); \
1519 tcg_temp_free_i64(t1); \
1520 }
1521
1522 VSX_EXTRACT_INSERT(xxextractuw)
1523 VSX_EXTRACT_INSERT(xxinsertw)
1524
1525 #ifdef TARGET_PPC64
1526 static void gen_xsxexpdp(DisasContext *ctx)
1527 {
1528 TCGv rt = cpu_gpr[rD(ctx->opcode)];
1529 TCGv_i64 t0;
1530 if (unlikely(!ctx->vsx_enabled)) {
1531 gen_exception(ctx, POWERPC_EXCP_VSXU);
1532 return;
1533 }
1534 t0 = tcg_temp_new_i64();
1535 get_cpu_vsrh(t0, xB(ctx->opcode));
1536 tcg_gen_extract_i64(rt, t0, 52, 11);
1537 tcg_temp_free_i64(t0);
1538 }
1539
1540 static void gen_xsxexpqp(DisasContext *ctx)
1541 {
1542 TCGv_i64 xth;
1543 TCGv_i64 xtl;
1544 TCGv_i64 xbh;
1545
1546 if (unlikely(!ctx->vsx_enabled)) {
1547 gen_exception(ctx, POWERPC_EXCP_VSXU);
1548 return;
1549 }
1550 xth = tcg_temp_new_i64();
1551 xtl = tcg_temp_new_i64();
1552 xbh = tcg_temp_new_i64();
1553 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1554
1555 tcg_gen_extract_i64(xth, xbh, 48, 15);
1556 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1557 tcg_gen_movi_i64(xtl, 0);
1558 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1559
1560 tcg_temp_free_i64(xbh);
1561 tcg_temp_free_i64(xth);
1562 tcg_temp_free_i64(xtl);
1563 }
1564
1565 static void gen_xsiexpdp(DisasContext *ctx)
1566 {
1567 TCGv_i64 xth;
1568 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1569 TCGv rb = cpu_gpr[rB(ctx->opcode)];
1570 TCGv_i64 t0;
1571
1572 if (unlikely(!ctx->vsx_enabled)) {
1573 gen_exception(ctx, POWERPC_EXCP_VSXU);
1574 return;
1575 }
1576 t0 = tcg_temp_new_i64();
1577 xth = tcg_temp_new_i64();
1578 tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1579 tcg_gen_andi_i64(t0, rb, 0x7FF);
1580 tcg_gen_shli_i64(t0, t0, 52);
1581 tcg_gen_or_i64(xth, xth, t0);
1582 set_cpu_vsrh(xT(ctx->opcode), xth);
1583 /* dword[1] is undefined */
1584 tcg_temp_free_i64(t0);
1585 tcg_temp_free_i64(xth);
1586 }
1587
1588 static void gen_xsiexpqp(DisasContext *ctx)
1589 {
1590 TCGv_i64 xth;
1591 TCGv_i64 xtl;
1592 TCGv_i64 xah;
1593 TCGv_i64 xal;
1594 TCGv_i64 xbh;
1595 TCGv_i64 t0;
1596
1597 if (unlikely(!ctx->vsx_enabled)) {
1598 gen_exception(ctx, POWERPC_EXCP_VSXU);
1599 return;
1600 }
1601 xth = tcg_temp_new_i64();
1602 xtl = tcg_temp_new_i64();
1603 xah = tcg_temp_new_i64();
1604 xal = tcg_temp_new_i64();
1605 get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
1606 get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
1607 xbh = tcg_temp_new_i64();
1608 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1609 t0 = tcg_temp_new_i64();
1610
1611 tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1612 tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1613 tcg_gen_shli_i64(t0, t0, 48);
1614 tcg_gen_or_i64(xth, xth, t0);
1615 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1616 tcg_gen_mov_i64(xtl, xal);
1617 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1618
1619 tcg_temp_free_i64(t0);
1620 tcg_temp_free_i64(xth);
1621 tcg_temp_free_i64(xtl);
1622 tcg_temp_free_i64(xah);
1623 tcg_temp_free_i64(xal);
1624 tcg_temp_free_i64(xbh);
1625 }
1626
1627 static void gen_xsxsigdp(DisasContext *ctx)
1628 {
1629 TCGv rt = cpu_gpr[rD(ctx->opcode)];
1630 TCGv_i64 t0, t1, zr, nan, exp;
1631
1632 if (unlikely(!ctx->vsx_enabled)) {
1633 gen_exception(ctx, POWERPC_EXCP_VSXU);
1634 return;
1635 }
1636 exp = tcg_temp_new_i64();
1637 t0 = tcg_temp_new_i64();
1638 t1 = tcg_temp_new_i64();
1639 zr = tcg_const_i64(0);
1640 nan = tcg_const_i64(2047);
1641
1642 get_cpu_vsrh(t1, xB(ctx->opcode));
1643 tcg_gen_extract_i64(exp, t1, 52, 11);
1644 tcg_gen_movi_i64(t0, 0x0010000000000000);
1645 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1646 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1647 get_cpu_vsrh(t1, xB(ctx->opcode));
1648 tcg_gen_andi_i64(rt, t1, 0x000FFFFFFFFFFFFF);
1649 tcg_gen_or_i64(rt, rt, t0);
1650
1651 tcg_temp_free_i64(t0);
1652 tcg_temp_free_i64(t1);
1653 tcg_temp_free_i64(exp);
1654 tcg_temp_free_i64(zr);
1655 tcg_temp_free_i64(nan);
1656 }
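/*
 * xsxsigdp materialises the 53-bit significand: the movcond pair computes
 * the implicit integer bit (2^52), present for normal numbers but zero
 * when the biased exponent is 0 (zero/denormal) or 2047 (infinity/NaN),
 * and ORs it onto the 52 stored fraction bits.  gen_xsxsigqp and
 * gen_xvxsigdp repeat the same pattern at quad/vector field widths.
 */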
1657
1658 static void gen_xsxsigqp(DisasContext *ctx)
1659 {
1660 TCGv_i64 t0, zr, nan, exp;
1661 TCGv_i64 xth;
1662 TCGv_i64 xtl;
1663 TCGv_i64 xbh;
1664 TCGv_i64 xbl;
1665
1666 if (unlikely(!ctx->vsx_enabled)) {
1667 gen_exception(ctx, POWERPC_EXCP_VSXU);
1668 return;
1669 }
1670 xth = tcg_temp_new_i64();
1671 xtl = tcg_temp_new_i64();
1672 xbh = tcg_temp_new_i64();
1673 xbl = tcg_temp_new_i64();
1674 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1675 get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
1676 exp = tcg_temp_new_i64();
1677 t0 = tcg_temp_new_i64();
1678 zr = tcg_const_i64(0);
1679 nan = tcg_const_i64(32767);
1680
1681 tcg_gen_extract_i64(exp, xbh, 48, 15);
1682 tcg_gen_movi_i64(t0, 0x0001000000000000);
1683 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1684 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1685 tcg_gen_andi_i64(xth, xbh, 0x0000FFFFFFFFFFFF);
1686 tcg_gen_or_i64(xth, xth, t0);
1687 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1688 tcg_gen_mov_i64(xtl, xbl);
1689 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1690
1691 tcg_temp_free_i64(t0);
1692 tcg_temp_free_i64(exp);
1693 tcg_temp_free_i64(zr);
1694 tcg_temp_free_i64(nan);
1695 tcg_temp_free_i64(xth);
1696 tcg_temp_free_i64(xtl);
1697 tcg_temp_free_i64(xbh);
1698 tcg_temp_free_i64(xbl);
1699 }
1700 #endif
1701
1702 static void gen_xviexpsp(DisasContext *ctx)
1703 {
1704 TCGv_i64 xth;
1705 TCGv_i64 xtl;
1706 TCGv_i64 xah;
1707 TCGv_i64 xal;
1708 TCGv_i64 xbh;
1709 TCGv_i64 xbl;
1710 TCGv_i64 t0;
1711
1712 if (unlikely(!ctx->vsx_enabled)) {
1713 gen_exception(ctx, POWERPC_EXCP_VSXU);
1714 return;
1715 }
1716 xth = tcg_temp_new_i64();
1717 xtl = tcg_temp_new_i64();
1718 xah = tcg_temp_new_i64();
1719 xal = tcg_temp_new_i64();
1720 xbh = tcg_temp_new_i64();
1721 xbl = tcg_temp_new_i64();
1722 get_cpu_vsrh(xah, xA(ctx->opcode));
1723 get_cpu_vsrl(xal, xA(ctx->opcode));
1724 get_cpu_vsrh(xbh, xB(ctx->opcode));
1725 get_cpu_vsrl(xbl, xB(ctx->opcode));
1726 t0 = tcg_temp_new_i64();
1727
1728 tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
1729 tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
1730 tcg_gen_shli_i64(t0, t0, 23);
1731 tcg_gen_or_i64(xth, xth, t0);
1732 set_cpu_vsrh(xT(ctx->opcode), xth);
1733 tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
1734 tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
1735 tcg_gen_shli_i64(t0, t0, 23);
1736 tcg_gen_or_i64(xtl, xtl, t0);
1737 set_cpu_vsrl(xT(ctx->opcode), xtl);
1738
1739 tcg_temp_free_i64(t0);
1740 tcg_temp_free_i64(xth);
1741 tcg_temp_free_i64(xtl);
1742 tcg_temp_free_i64(xah);
1743 tcg_temp_free_i64(xal);
1744 tcg_temp_free_i64(xbh);
1745 tcg_temp_free_i64(xbl);
1746 }
1747
1748 static void gen_xviexpdp(DisasContext *ctx)
1749 {
1750 TCGv_i64 xth;
1751 TCGv_i64 xtl;
1752 TCGv_i64 xah;
1753 TCGv_i64 xal;
1754 TCGv_i64 xbh;
1755 TCGv_i64 xbl;
1756 TCGv_i64 t0;
1757
1758 if (unlikely(!ctx->vsx_enabled)) {
1759 gen_exception(ctx, POWERPC_EXCP_VSXU);
1760 return;
1761 }
1762 xth = tcg_temp_new_i64();
1763 xtl = tcg_temp_new_i64();
1764 xah = tcg_temp_new_i64();
1765 xal = tcg_temp_new_i64();
1766 xbh = tcg_temp_new_i64();
1767 xbl = tcg_temp_new_i64();
1768 get_cpu_vsrh(xah, xA(ctx->opcode));
1769 get_cpu_vsrl(xal, xA(ctx->opcode));
1770 get_cpu_vsrh(xbh, xB(ctx->opcode));
1771 get_cpu_vsrl(xbl, xB(ctx->opcode));
1772 t0 = tcg_temp_new_i64();
1773
1774 tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
1775 tcg_gen_andi_i64(t0, xbh, 0x7FF);
1776 tcg_gen_shli_i64(t0, t0, 52);
1777 tcg_gen_or_i64(xth, xth, t0);
1778 set_cpu_vsrh(xT(ctx->opcode), xth);
1779 tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
1780 tcg_gen_andi_i64(t0, xbl, 0x7FF);
1781 tcg_gen_shli_i64(t0, t0, 52);
1782 tcg_gen_or_i64(xtl, xtl, t0);
1783 set_cpu_vsrl(xT(ctx->opcode), xtl);
1784
1785 tcg_temp_free_i64(t0);
1786 tcg_temp_free_i64(xth);
1787 tcg_temp_free_i64(xtl);
1788 tcg_temp_free_i64(xah);
1789 tcg_temp_free_i64(xal);
1790 tcg_temp_free_i64(xbh);
1791 tcg_temp_free_i64(xbl);
1792 }
1793
1794 static void gen_xvxexpsp(DisasContext *ctx)
1795 {
1796 TCGv_i64 xth;
1797 TCGv_i64 xtl;
1798 TCGv_i64 xbh;
1799 TCGv_i64 xbl;
1800
1801 if (unlikely(!ctx->vsx_enabled)) {
1802 gen_exception(ctx, POWERPC_EXCP_VSXU);
1803 return;
1804 }
1805 xth = tcg_temp_new_i64();
1806 xtl = tcg_temp_new_i64();
1807 xbh = tcg_temp_new_i64();
1808 xbl = tcg_temp_new_i64();
1809 get_cpu_vsrh(xbh, xB(ctx->opcode));
1810 get_cpu_vsrl(xbl, xB(ctx->opcode));
1811
1812 tcg_gen_shri_i64(xth, xbh, 23);
1813 tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
1814 set_cpu_vsrh(xT(ctx->opcode), xth);
1815 tcg_gen_shri_i64(xtl, xbl, 23);
1816 tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
1817 set_cpu_vsrl(xT(ctx->opcode), xtl);
1818
1819 tcg_temp_free_i64(xth);
1820 tcg_temp_free_i64(xtl);
1821 tcg_temp_free_i64(xbh);
1822 tcg_temp_free_i64(xbl);
1823 }
1824
1825 static void gen_xvxexpdp(DisasContext *ctx)
1826 {
1827 TCGv_i64 xth;
1828 TCGv_i64 xtl;
1829 TCGv_i64 xbh;
1830 TCGv_i64 xbl;
1831
1832 if (unlikely(!ctx->vsx_enabled)) {
1833 gen_exception(ctx, POWERPC_EXCP_VSXU);
1834 return;
1835 }
1836 xth = tcg_temp_new_i64();
1837 xtl = tcg_temp_new_i64();
1838 xbh = tcg_temp_new_i64();
1839 xbl = tcg_temp_new_i64();
1840 get_cpu_vsrh(xbh, xB(ctx->opcode));
1841 get_cpu_vsrl(xbl, xB(ctx->opcode));
1842
1843 tcg_gen_extract_i64(xth, xbh, 52, 11);
1844 set_cpu_vsrh(xT(ctx->opcode), xth);
1845 tcg_gen_extract_i64(xtl, xbl, 52, 11);
1846 set_cpu_vsrl(xT(ctx->opcode), xtl);
1847
1848 tcg_temp_free_i64(xth);
1849 tcg_temp_free_i64(xtl);
1850 tcg_temp_free_i64(xbh);
1851 tcg_temp_free_i64(xbl);
1852 }
1853
1854 GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
1855
1856 static void gen_xvxsigdp(DisasContext *ctx)
1857 {
1858 TCGv_i64 xth;
1859 TCGv_i64 xtl;
1860 TCGv_i64 xbh;
1861 TCGv_i64 xbl;
1862 TCGv_i64 t0, zr, nan, exp;
1863
1864 if (unlikely(!ctx->vsx_enabled)) {
1865 gen_exception(ctx, POWERPC_EXCP_VSXU);
1866 return;
1867 }
1868 xth = tcg_temp_new_i64();
1869 xtl = tcg_temp_new_i64();
1870 xbh = tcg_temp_new_i64();
1871 xbl = tcg_temp_new_i64();
1872 get_cpu_vsrh(xbh, xB(ctx->opcode));
1873 get_cpu_vsrl(xbl, xB(ctx->opcode));
1874 exp = tcg_temp_new_i64();
1875 t0 = tcg_temp_new_i64();
1876 zr = tcg_const_i64(0);
1877 nan = tcg_const_i64(2047);
1878
1879 tcg_gen_extract_i64(exp, xbh, 52, 11);
1880 tcg_gen_movi_i64(t0, 0x0010000000000000);
1881 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1882 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1883 tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
1884 tcg_gen_or_i64(xth, xth, t0);
1885 set_cpu_vsrh(xT(ctx->opcode), xth);
1886
1887 tcg_gen_extract_i64(exp, xbl, 52, 11);
1888 tcg_gen_movi_i64(t0, 0x0010000000000000);
1889 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1890 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1891 tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
1892 tcg_gen_or_i64(xtl, xtl, t0);
1893 set_cpu_vsrl(xT(ctx->opcode), xtl);
1894
1895 tcg_temp_free_i64(t0);
1896 tcg_temp_free_i64(exp);
1897 tcg_temp_free_i64(zr);
1898 tcg_temp_free_i64(nan);
1899 tcg_temp_free_i64(xth);
1900 tcg_temp_free_i64(xtl);
1901 tcg_temp_free_i64(xbh);
1902 tcg_temp_free_i64(xbl);
1903 }
1904
1905 #undef GEN_XX2FORM
1906 #undef GEN_XX3FORM
1907 #undef GEN_XX2IFORM
1908 #undef GEN_XX3_RC_FORM
1909 #undef GEN_XX3FORM_DM
1910 #undef VSX_LOGICAL