/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}
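
/*
 * VSR0-31 overlay the FPRs: the high doubleword of VSR n lives in
 * cpu_fpr[n] and the low doubleword in the separate cpu_vsr[n] array,
 * while VSR32-63 alias the Altivec registers VR0-31 across
 * cpu_avrh/cpu_avrl.  For example, cpu_vsrh(34) resolves to
 * cpu_avrh[2], the high doubleword of VR2.
 */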

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

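/*
 * For reference, a sketch of what the first instantiation expands to,
 * with the enable check and access-type setup elided:
 *
 *   static void gen_lxsdx(DisasContext *ctx)
 *   {
 *       TCGv EA = tcg_temp_new();
 *       gen_addr_reg_index(ctx, EA);
 *       gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
 *       tcg_temp_free(EA);
 *   }
 */
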
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}
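
/*
 * lxvdsx is load-and-splat: the doubleword at EA ends up in both halves
 * of XT, the usual way to feed one scalar into every lane of a
 * subsequent two-doubleword vector operation.
 */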

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}
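
/*
 * LE illustration: for bytes B0..B7 at EA, the MO_LEQ load yields the
 * value B7B6B5B4_B3B2B1B0; the shri/deposit pair then swaps the two
 * words, giving B3B2B1B0_B7B6B5B4.  Each word is byte-reversed but the
 * words stay in memory order, which is exactly lxvw4x's little-endian
 * element layout.
 */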

static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
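
/*
 * A minimal host-side sketch of the same swap on one 64-bit value
 * (illustrative only, assuming <stdint.h>; not part of the translator):
 */
#if 0
static uint64_t bswap16x4(uint64_t in)
{
    const uint64_t mask = 0x00FF00FF00FF00FFULL;

    /* swap the two bytes inside each of the four 16-bit lanes */
    return ((in & mask) << 8) | ((in >> 8) & mask);
}
/* e.g. bswap16x4(0x0011223344556677) == 0x1100332255447766 */
#endif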

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}
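
/*
 * Byte elements have the same layout in big- and little-endian modes,
 * so lxvb16x needs no le_mode fixup, unlike lxvw4x/lxvh8x above: two
 * big-endian doubleword loads already give the 16 bytes in order.
 */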

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}
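
/*
 * The le_mode path is the mirror of gen_lxvw4x: the words are swapped
 * before the MO_LEQ store, so each word lands in its big-endian element
 * position with little-endian byte order inside it.
 */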

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

#define MV_VSRW(name, tcgop1, tcgop2, target, source)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (xS(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->fpu_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_FPU);             \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    TCGv_i64 tmp = tcg_temp_new_i64();                        \
    tcg_gen_##tcgop1(tmp, source);                            \
    tcg_gen_##tcgop2(target, tmp);                            \
    tcg_temp_free_i64(tmp);                                   \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

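/*
 * Worked example: mfvsrwz copies the low word of VSR[XS]'s high
 * doubleword into the GPR, zero-extended, i.e. GPR[RA] =
 * (uint32_t)VSRH(XS); mtvsrwa and mtvsrwz go the other way, sign- and
 * zero-extending the GPR's low word into VSRH(XT).
 */
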
#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (xS(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->fpu_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_FPU);             \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    tcg_gen_mov_i64(target, source);                          \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

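/*
 * Illustration: mtvsrdd XT,RA,RB builds XT = GPR[RA] || GPR[RB]; the
 * RA == 0 special case above substitutes zero for the high doubleword,
 * so mtvsrdd XT,0,RB zero-extends one GPR into a vector register.
 */
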
#endif

static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}
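
/*
 * DM decoding, for reference: bit 1 of DM selects which doubleword of
 * xA becomes XT's high half, bit 0 selects the doubleword of xB for the
 * low half.  DM=0b00 gives XT = {A.hi, B.hi}, DM=0b11 gives
 * XT = {A.lo, B.lo}.  The temporaries in the first branch are only
 * needed because XT may alias xA or xB.
 */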

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 xb, sgm;                                     \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        xb = tcg_temp_new_i64();                              \
        sgm = tcg_temp_new_i64();                             \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));       \
        tcg_gen_movi_i64(sgm, sgn_mask);                      \
        switch (op) {                                         \
        case OP_ABS: {                                        \
            tcg_gen_andc_i64(xb, xb, sgm);                    \
            break;                                            \
        }                                                     \
        case OP_NABS: {                                       \
            tcg_gen_or_i64(xb, xb, sgm);                      \
            break;                                            \
        }                                                     \
        case OP_NEG: {                                        \
            tcg_gen_xor_i64(xb, xb, sgm);                     \
            break;                                            \
        }                                                     \
        case OP_CPSGN: {                                      \
            TCGv_i64 xa = tcg_temp_new_i64();                 \
            tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
            tcg_gen_and_i64(xa, xa, sgm);                     \
            tcg_gen_andc_i64(xb, xb, sgm);                    \
            tcg_gen_or_i64(xb, xb, xa);                       \
            tcg_temp_free_i64(xa);                            \
            break;                                            \
        }                                                     \
        }                                                     \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);       \
        tcg_temp_free_i64(xb);                                \
        tcg_temp_free_i64(sgm);                               \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

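/*
 * The same sign-mask tricks in scalar form (a sketch over raw IEEE-754
 * bit patterns, assuming <stdint.h>; not part of the translator):
 */
#if 0
static uint64_t abs_bits(uint64_t x)  { return x & ~SGN_MASK_DP; }
static uint64_t nabs_bits(uint64_t x) { return x | SGN_MASK_DP; }
static uint64_t neg_bits(uint64_t x)  { return x ^ SGN_MASK_DP; }
/* copy the sign of a onto the magnitude of b */
static uint64_t cpsgn_bits(uint64_t a, uint64_t b)
{
    return (a & SGN_MASK_DP) | (b & ~SGN_MASK_DP);
}
#endif
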
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 xbh, xbl, sgm;                               \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        xbh = tcg_temp_new_i64();                             \
        xbl = tcg_temp_new_i64();                             \
        sgm = tcg_temp_new_i64();                             \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));      \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));      \
        tcg_gen_movi_i64(sgm, sgn_mask);                      \
        switch (op) {                                         \
        case OP_ABS: {                                        \
            tcg_gen_andc_i64(xbh, xbh, sgm);                  \
            tcg_gen_andc_i64(xbl, xbl, sgm);                  \
            break;                                            \
        }                                                     \
        case OP_NABS: {                                       \
            tcg_gen_or_i64(xbh, xbh, sgm);                    \
            tcg_gen_or_i64(xbl, xbl, sgm);                    \
            break;                                            \
        }                                                     \
        case OP_NEG: {                                        \
            tcg_gen_xor_i64(xbh, xbh, sgm);                   \
            tcg_gen_xor_i64(xbl, xbl, sgm);                   \
            break;                                            \
        }                                                     \
        case OP_CPSGN: {                                      \
            TCGv_i64 xah = tcg_temp_new_i64();                \
            TCGv_i64 xal = tcg_temp_new_i64();                \
            tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));  \
            tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));  \
            tcg_gen_and_i64(xah, xah, sgm);                   \
            tcg_gen_and_i64(xal, xal, sgm);                   \
            tcg_gen_andc_i64(xbh, xbh, sgm);                  \
            tcg_gen_andc_i64(xbl, xbl, sgm);                  \
            tcg_gen_or_i64(xbh, xbh, xah);                    \
            tcg_gen_or_i64(xbl, xbl, xal);                    \
            tcg_temp_free_i64(xah);                           \
            tcg_temp_free_i64(xal);                           \
            break;                                            \
        }                                                     \
        }                                                     \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);      \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);      \
        tcg_temp_free_i64(xbh);                               \
        tcg_temp_free_i64(xbl);                               \
        tcg_temp_free_i64(sgm);                               \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

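/*
 * Note that SGN_MASK_SP sets bit 31 of both words in each doubleword,
 * so one 64-bit andc/or/xor per half handles all four single-precision
 * lanes at once.
 */
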
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

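/*
 * The op1/op2/inval/type arguments are unused in these expansions; they
 * appear to mirror the opcode-table macros (GEN_XX2FORM/GEN_XX3FORM and
 * friends, #undef'd at the end of this file) that hook each gen_*
 * routine into the decoder.
 */
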
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

#define VSX_LOGICAL(name, tcg_op)                             \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
               cpu_vsrh(xB(ctx->opcode)));                    \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
               cpu_vsrl(xB(ctx->opcode)));                    \
    }

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 a0, a1, b0, b1;                              \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        a0 = tcg_temp_new_i64();                              \
        a1 = tcg_temp_new_i64();                              \
        b0 = tcg_temp_new_i64();                              \
        b1 = tcg_temp_new_i64();                              \
        if (high) {                                           \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));   \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));   \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));   \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));   \
        } else {                                              \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));   \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));   \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));   \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));   \
        }                                                     \
        tcg_gen_shri_i64(a0, a0, 32);                         \
        tcg_gen_shri_i64(b0, b0, 32);                         \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),        \
                            b0, a0, 32, 32);                  \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),        \
                            b1, a1, 32, 32);                  \
        tcg_temp_free_i64(a0);                                \
        tcg_temp_free_i64(a1);                                \
        tcg_temp_free_i64(b0);                                \
        tcg_temp_free_i64(b1);                                \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

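/*
 * Word-level view, for reference: with xA = {A0,A1,A2,A3} and
 * xB = {B0,B1,B2,B3}, xxmrghw produces XT = {A0,B0,A1,B1} from the high
 * halves and xxmrglw produces XT = {A2,B2,A3,B3} from the low halves.
 */
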
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

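/*
 * Scalar equivalent of the select above, per 64-bit half (a sketch,
 * assuming <stdint.h>; not part of the translator).  Each bit of c
 * chooses b where set and a where clear:
 */
#if 0
static uint64_t xxsel64(uint64_t a, uint64_t b, uint64_t c)
{
    return (b & c) | (a & ~c);
}
#endif
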
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

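/*
 * UIM addresses the source word: bit 1 picks the doubleword of xB, bit
 * 0 the word within it, so UIM=0..3 maps to words 0..3.  The shli/or
 * pair then replicates the chosen word into all four lanes of XT.
 */
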
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

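/*
 * pattern() broadcasts a byte to all eight byte lanes: ~0ULL / 0xff is
 * 0x0101010101010101, so for example pattern(0x3c) ==
 * 0x3c3c3c3c3c3c3c3c.
 */
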
static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
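
/*
 * In word terms, xxsldwi concatenates xA:xB as eight words and takes
 * the four starting at index SHW: SHW=1 gives XT = {A1,A2,A3,B0},
 * SHW=3 gives XT = {A3,B0,B1,B2}.
 */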

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL