/*** VSX extension ***/

static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

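/*
 * Scalar VSX loads: check that VSX is available, compute the effective
 * address, load into the high doubleword of VSR[XT] with the given
 * gen_qemu_* operation, and leave the low doubleword undefined.
 */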
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

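/*
 * lxvd2x: load the doubleword at EA into the high half of VSR[XT] and
 * the doubleword at EA + 8 into the low half.
 */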
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

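/* lxvdsx: load one doubleword from EA and splat it into both halves of VSR[XT]. */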
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

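/*
 * Byte-swap each 16-bit (gen_bswap16x8) or 32-bit (gen_bswap32x4) element
 * of a 128-bit value passed and returned as two 64-bit halves.
 */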
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

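/*
 * 16-byte vector loads (lxv is DQ-form, lxvx is indexed): VSRs 0-31
 * require VSX, VSRs 32-63 require Altivec. The two doublewords are
 * accessed low half first in little-endian mode, high half first
 * otherwise.
 */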
#define VSX_VECTOR_LOAD(name, op, indexed)                    \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrh(xt, xth);                                \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrh(xt, xth);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)

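/* 16-byte vector stores (stxv, stxvx), the mirror image of VSX_VECTOR_LOAD. */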
#define VSX_VECTOR_STORE(name, op, indexed)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, xt);                                    \
    get_cpu_vsrl(xtl, xt);                                    \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)

#ifdef TARGET_PPC64
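/*
 * lxvl/lxvll/stxvl/stxvll: loads and stores with an explicit length
 * operand passed in GPR[RB], implemented out of line in helpers.
 */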
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                        \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA, xt;                                                  \
                                                                  \
    if (xT(ctx->opcode) < 32) {                                   \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
    } else {                                                      \
        if (unlikely(!ctx->altivec_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VPU);                 \
            return;                                               \
        }                                                         \
    }                                                             \
    EA = tcg_temp_new();                                          \
    xt = tcg_const_tl(xT(ctx->opcode));                           \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    gen_addr_register(ctx, EA);                                   \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
    tcg_temp_free(EA);                                            \
    tcg_temp_free(xt);                                            \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

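/*
 * DS-form scalar loads (lxsd, lxssp). These target VSRs 32-63, so the
 * availability check is on Altivec rather than VSX.
 */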
#define VSX_LOAD_SCALAR_DS(name, operation)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                  \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

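/*
 * Scalar VSX stores: store the high doubleword of VSR[XS] to EA with the
 * given gen_qemu_* operation.
 */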
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

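/* DS-form scalar stores (stxsd, stxssp), mirroring VSX_LOAD_SCALAR_DS. */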
#define VSX_STORE_SCALAR_DS(name, operation)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

#endif

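/*
 * xxpermdi: select one doubleword of VSR[XA] and one of VSR[XB] according
 * to DM. The first branch reads both sources before writing, so the result
 * is correct when XT overlaps XA or XB.
 */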
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

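/*
 * Scalar sign-bit operations on the high doubleword of VSR[XB]: ABS
 * clears the sign bit, NABS sets it, NEG flips it and CPSGN copies it
 * from VSR[XA].
 */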
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xb, sgm;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xb = tcg_temp_new_i64();                                  \
    sgm = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xb, xB(ctx->opcode));                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xb, xb, sgm);                          \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xb, xb, sgm);                         \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xa = tcg_temp_new_i64();                     \
        get_cpu_vsrh(xa, xA(ctx->opcode));                    \
        tcg_gen_and_i64(xa, xa, sgm);                         \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        tcg_gen_or_i64(xb, xb, xa);                           \
        tcg_temp_free_i64(xa);                                \
        break;                                                \
    }                                                         \
    }                                                         \
    set_cpu_vsrh(xT(ctx->opcode), xb);                        \
    tcg_temp_free_i64(xb);                                    \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

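/*
 * Quad-precision variants of the sign-bit operations; these use VSRs
 * 32-63 and touch only the sign bit of the most-significant doubleword.
 */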
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    int xa;                                                   \
    int xt = rD(ctx->opcode) + 32;                            \
    int xb = rB(ctx->opcode) + 32;                            \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                         \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tmp = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xb);                                    \
    get_cpu_vsrl(xbl, xb);                                    \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        break;                                                \
    case OP_CPSGN:                                            \
        xah = tcg_temp_new_i64();                             \
        xa = rA(ctx->opcode) + 32;                            \
        get_cpu_vsrh(tmp, xa);                                \
        tcg_gen_and_i64(xah, tmp, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_temp_free_i64(xah);                               \
        break;                                                \
    }                                                         \
    set_cpu_vsrh(xt, xbh);                                    \
    set_cpu_vsrl(xt, xbl);                                    \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(sgm);                                   \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

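/*
 * Vector forms of the sign-bit operations, applied to both doublewords;
 * SGN_MASK_SP selects the sign bit of each 32-bit element.
 */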
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xbh, xbl, sgm;                                   \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xB(ctx->opcode));                       \
    get_cpu_vsrl(xbl, xB(ctx->opcode));                       \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        tcg_gen_or_i64(xbl, xbl, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        tcg_gen_xor_i64(xbl, xbl, sgm);                       \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xah = tcg_temp_new_i64();                    \
        TCGv_i64 xal = tcg_temp_new_i64();                    \
        get_cpu_vsrh(xah, xA(ctx->opcode));                   \
        get_cpu_vsrl(xal, xA(ctx->opcode));                   \
        tcg_gen_and_i64(xah, xah, sgm);                       \
        tcg_gen_and_i64(xal, xal, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_gen_or_i64(xbl, xbl, xal);                        \
        tcg_temp_free_i64(xah);                               \
        tcg_temp_free_i64(xal);                               \
        break;                                                \
    }                                                         \
    }                                                         \
    set_cpu_vsrh(xT(ctx->opcode), xbh);                       \
    set_cpu_vsrl(xT(ctx->opcode), xbl);                       \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

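/*
 * Boilerplate generators for instructions implemented by out-of-line
 * helpers: GEN_VSX_HELPER_2 passes the raw opcode to the helper, while
 * GEN_VSX_HELPER_XT_XB_ENV feeds the helper the high doubleword of
 * VSR[XB] and writes its result to the high doubleword of VSR[XT].
 */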
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsrh(t0, xB(ctx->opcode));                        \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsrh(xT(ctx->opcode), t1);                        \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

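/*
 * xxbrd/xxbrh/xxbrq/xxbrw: byte-reverse VSR[XB] by doubleword, halfword,
 * quadword and word respectively.
 */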
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

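/*
 * 128-bit logical operations, implemented with the TCG generic vector
 * expanders on the full 16-byte register.
 */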
#define VSX_LOGICAL(name, vece, tcg_op)                       \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),            \
           vsr_full_offset(xA(ctx->opcode)),                  \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);         \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

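/*
 * xxmrghw/xxmrglw: interleave the two words of the high (or low)
 * doublewords of VSR[XA] and VSR[XB] into A/B word pairs.
 */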
#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 a0, a1, b0, b1, tmp;                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    a0 = tcg_temp_new_i64();                                  \
    a1 = tcg_temp_new_i64();                                  \
    b0 = tcg_temp_new_i64();                                  \
    b1 = tcg_temp_new_i64();                                  \
    tmp = tcg_temp_new_i64();                                 \
    if (high) {                                               \
        get_cpu_vsrh(a0, xA(ctx->opcode));                    \
        get_cpu_vsrh(a1, xA(ctx->opcode));                    \
        get_cpu_vsrh(b0, xB(ctx->opcode));                    \
        get_cpu_vsrh(b1, xB(ctx->opcode));                    \
    } else {                                                  \
        get_cpu_vsrl(a0, xA(ctx->opcode));                    \
        get_cpu_vsrl(a1, xA(ctx->opcode));                    \
        get_cpu_vsrl(b0, xB(ctx->opcode));                    \
        get_cpu_vsrl(b1, xB(ctx->opcode));                    \
    }                                                         \
    tcg_gen_shri_i64(a0, a0, 32);                             \
    tcg_gen_shri_i64(b0, b0, 32);                             \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                 \
    set_cpu_vsrh(xT(ctx->opcode), tmp);                       \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                 \
    set_cpu_vsrl(xT(ctx->opcode), tmp);                       \
    tcg_temp_free_i64(a0);                                    \
    tcg_temp_free_i64(a1);                                    \
    tcg_temp_free_i64(b0);                                    \
    tcg_temp_free_i64(b1);                                    \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

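/* xxsel: bitwise select, XT = (XB & XC) | (XA & ~XC). */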
static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

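/*
 * xxspltw: splat word UIM of VSR[XB] across VSR[XT]. The XOR below turns
 * the big-endian word index into a host memory offset on little-endian
 * hosts.
 */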
static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}

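/*
 * xxsldwi: shift the 256-bit concatenation of VSR[XA] and VSR[XB] left by
 * SHW words and keep the high 128 bits; each case assembles the result
 * from the corresponding doubleword halves.
 */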
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

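/*
 * xxextractuw/xxinsertw are done in helpers; a uimm above 15 is out of
 * range and simply zeroes VSR[XT].
 */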
#define VSX_EXTRACT_INSERT(name)                              \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv xt, xb;                                              \
    TCGv_i32 t0;                                              \
    TCGv_i64 t1;                                              \
    uint8_t uimm = UIMM4(ctx->opcode);                        \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = tcg_const_tl(xT(ctx->opcode));                       \
    xb = tcg_const_tl(xB(ctx->opcode));                       \
    t0 = tcg_temp_new_i32();                                  \
    t1 = tcg_temp_new_i64();                                  \
    /*                                                        \
     * uimm > 15 is out of bounds; uimm > 12 is handled       \
     * as per hardware in the helper.                         \
     */                                                       \
    if (uimm > 15) {                                          \
        tcg_gen_movi_i64(t1, 0);                              \
        set_cpu_vsrh(xT(ctx->opcode), t1);                    \
        set_cpu_vsrl(xT(ctx->opcode), t1);                    \
        /* Free the temporaries before the early return. */   \
        tcg_temp_free(xb);                                    \
        tcg_temp_free(xt);                                    \
        tcg_temp_free_i32(t0);                                \
        tcg_temp_free_i64(t1);                                \
        return;                                               \
    }                                                         \
    tcg_gen_movi_i32(t0, uimm);                               \
    gen_helper_##name(cpu_env, xt, xb, t0);                   \
    tcg_temp_free(xb);                                        \
    tcg_temp_free(xt);                                        \
    tcg_temp_free_i32(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
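/*
 * Scalar exponent insert/extract: xsxexpdp and xsxexpqp read the biased
 * exponent of the DP value in VSR[XB] or the QP value in VSR[rB + 32];
 * xsiexpdp builds a DP value from GPR[RA] with the exponent taken from
 * GPR[RB], and xsiexpqp does the same for QP values held in VSRs 32-63.
 */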
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

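/*
 * xsxsigdp/xsxsigqp: extract the significand, supplying the implicit
 * leading bit unless the exponent is zero or all-ones (infinity/NaN).
 */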
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

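/* xvxsigdp: the per-doubleword vector form of the significand extraction above. */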
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL