]> git.proxmox.com Git - mirror_qemu.git/blame - target/ppc/translate/vsx-impl.inc.c
Merge remote-tracking branch 'remotes/vivier2/tags/trivial-branch-pull-request' into...
[mirror_qemu.git] / target / ppc / translate / vsx-impl.inc.c
CommitLineData
3014427a
BH
1/*** VSX extension ***/
2
8b3b2d75
MCA
3static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
4{
d59d1182 5 tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
8b3b2d75
MCA
6}
7
8static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
9{
d59d1182 10 tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
8b3b2d75
MCA
11}
12
13static inline void set_cpu_vsrh(int n, TCGv_i64 src)
3014427a 14{
d59d1182 15 tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
3014427a
BH
16}
17
8b3b2d75 18static inline void set_cpu_vsrl(int n, TCGv_i64 src)
3014427a 19{
d59d1182 20 tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
3014427a
BH
21}
22
00084a25
MCA
23static inline TCGv_ptr gen_vsr_ptr(int reg)
24{
25 TCGv_ptr r = tcg_temp_new_ptr();
26 tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
27 return r;
28}
29
3014427a
BH
/*
 * VSX_LOAD_SCALAR - emit code for an X-form (reg + reg indexed) scalar
 * VSX load.  The loaded value goes into the high doubleword of VSR[XT];
 * the low doubleword is left untouched (architecturally undefined).
 * 'operation' names the gen_qemu_* load primitive to use.
 */
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
56
57static void gen_lxvd2x(DisasContext *ctx)
58{
59 TCGv EA;
8b3b2d75 60 TCGv_i64 t0;
3014427a
BH
61 if (unlikely(!ctx->vsx_enabled)) {
62 gen_exception(ctx, POWERPC_EXCP_VSXU);
63 return;
64 }
8b3b2d75 65 t0 = tcg_temp_new_i64();
3014427a
BH
66 gen_set_access_type(ctx, ACCESS_INT);
67 EA = tcg_temp_new();
68 gen_addr_reg_index(ctx, EA);
8b3b2d75
MCA
69 gen_qemu_ld64_i64(ctx, t0, EA);
70 set_cpu_vsrh(xT(ctx->opcode), t0);
3014427a 71 tcg_gen_addi_tl(EA, EA, 8);
8b3b2d75
MCA
72 gen_qemu_ld64_i64(ctx, t0, EA);
73 set_cpu_vsrl(xT(ctx->opcode), t0);
3014427a 74 tcg_temp_free(EA);
8b3b2d75 75 tcg_temp_free_i64(t0);
3014427a
BH
76}
77
78static void gen_lxvdsx(DisasContext *ctx)
79{
80 TCGv EA;
8b3b2d75
MCA
81 TCGv_i64 t0;
82 TCGv_i64 t1;
3014427a
BH
83 if (unlikely(!ctx->vsx_enabled)) {
84 gen_exception(ctx, POWERPC_EXCP_VSXU);
85 return;
86 }
8b3b2d75
MCA
87 t0 = tcg_temp_new_i64();
88 t1 = tcg_temp_new_i64();
3014427a
BH
89 gen_set_access_type(ctx, ACCESS_INT);
90 EA = tcg_temp_new();
91 gen_addr_reg_index(ctx, EA);
8b3b2d75
MCA
92 gen_qemu_ld64_i64(ctx, t0, EA);
93 set_cpu_vsrh(xT(ctx->opcode), t0);
94 tcg_gen_mov_i64(t1, t0);
95 set_cpu_vsrl(xT(ctx->opcode), t1);
3014427a 96 tcg_temp_free(EA);
8b3b2d75
MCA
97 tcg_temp_free_i64(t0);
98 tcg_temp_free_i64(t1);
3014427a
BH
99}
100
/*
 * lxvw4x: load four words into VSR[XT].
 *
 * Big-endian mode is two plain 8-byte BE loads.  In little-endian mode
 * each 8-byte LE load leaves the two words within the doubleword in the
 * wrong order, so the shri/deposit pair swaps the 32-bit halves of each
 * doubleword after loading.
 */
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* swap the 32-bit halves of each loaded doubleword */
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
141
1c074419
ND
142static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
143 TCGv_i64 inh, TCGv_i64 inl)
144{
145 TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
146 TCGv_i64 t0 = tcg_temp_new_i64();
147 TCGv_i64 t1 = tcg_temp_new_i64();
148
149 /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
150 tcg_gen_and_i64(t0, inh, mask);
151 tcg_gen_shli_i64(t0, t0, 8);
152 tcg_gen_shri_i64(t1, inh, 8);
153 tcg_gen_and_i64(t1, t1, mask);
154 tcg_gen_or_i64(outh, t0, t1);
155
156 /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
157 tcg_gen_and_i64(t0, inl, mask);
158 tcg_gen_shli_i64(t0, t0, 8);
159 tcg_gen_shri_i64(t1, inl, 8);
160 tcg_gen_and_i64(t1, t1, mask);
161 tcg_gen_or_i64(outl, t0, t1);
162
163 tcg_temp_free_i64(t0);
164 tcg_temp_free_i64(t1);
165 tcg_temp_free_i64(mask);
166}
167
14fd8ab2
ND
168static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
169 TCGv_i64 inh, TCGv_i64 inl)
170{
171 TCGv_i64 hi = tcg_temp_new_i64();
172 TCGv_i64 lo = tcg_temp_new_i64();
173
174 tcg_gen_bswap64_i64(hi, inh);
175 tcg_gen_bswap64_i64(lo, inl);
176 tcg_gen_shri_i64(outh, hi, 32);
177 tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
178 tcg_gen_shri_i64(outl, lo, 32);
179 tcg_gen_deposit_i64(outl, outl, lo, 32, 32);
180
181 tcg_temp_free_i64(hi);
182 tcg_temp_free_i64(lo);
183}
1c074419
ND
184static void gen_lxvh8x(DisasContext *ctx)
185{
186 TCGv EA;
8b3b2d75
MCA
187 TCGv_i64 xth;
188 TCGv_i64 xtl;
1c074419
ND
189
190 if (unlikely(!ctx->vsx_enabled)) {
191 gen_exception(ctx, POWERPC_EXCP_VSXU);
192 return;
193 }
8b3b2d75
MCA
194 xth = tcg_temp_new_i64();
195 xtl = tcg_temp_new_i64();
1c074419
ND
196 gen_set_access_type(ctx, ACCESS_INT);
197
198 EA = tcg_temp_new();
199 gen_addr_reg_index(ctx, EA);
200 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
201 tcg_gen_addi_tl(EA, EA, 8);
202 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
203 if (ctx->le_mode) {
204 gen_bswap16x8(xth, xtl, xth, xtl);
205 }
2a122435
AB
206 set_cpu_vsrh(xT(ctx->opcode), xth);
207 set_cpu_vsrl(xT(ctx->opcode), xtl);
1c074419 208 tcg_temp_free(EA);
8b3b2d75
MCA
209 tcg_temp_free_i64(xth);
210 tcg_temp_free_i64(xtl);
1c074419
ND
211}
212
8ee38fac
ND
213static void gen_lxvb16x(DisasContext *ctx)
214{
215 TCGv EA;
8b3b2d75
MCA
216 TCGv_i64 xth;
217 TCGv_i64 xtl;
8ee38fac
ND
218
219 if (unlikely(!ctx->vsx_enabled)) {
220 gen_exception(ctx, POWERPC_EXCP_VSXU);
221 return;
222 }
8b3b2d75
MCA
223 xth = tcg_temp_new_i64();
224 xtl = tcg_temp_new_i64();
8ee38fac
ND
225 gen_set_access_type(ctx, ACCESS_INT);
226 EA = tcg_temp_new();
227 gen_addr_reg_index(ctx, EA);
228 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
229 tcg_gen_addi_tl(EA, EA, 8);
230 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
2a122435
AB
231 set_cpu_vsrh(xT(ctx->opcode), xth);
232 set_cpu_vsrl(xT(ctx->opcode), xtl);
8ee38fac 233 tcg_temp_free(EA);
8b3b2d75
MCA
234 tcg_temp_free_i64(xth);
235 tcg_temp_free_i64(xtl);
8ee38fac
ND
236}
237
/*
 * VSX_VECTOR_LOAD - emit a 16-byte vector load (lxv / lxvx).
 *
 * 'indexed' selects the addressing form: X-form (reg + reg) for lxvx,
 * DQ-form (reg + immediate) for lxv.  Target registers >= 32 are the
 * AltiVec half of the VSR file, so the facility-enable check differs.
 * The two 8-byte halves are loaded in guest memory order: low half
 * first under LE, high half first under BE.
 */
#define VSX_VECTOR_LOAD(name, op, indexed)                    \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrh(xt, xth);                                \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrh(xt, xth);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)
292
/*
 * VSX_VECTOR_STORE - emit a 16-byte vector store (stxv / stxvx).
 * Mirror image of VSX_VECTOR_LOAD: same register/facility selection,
 * but the VSR halves are read first and then written to memory in
 * guest-endian order.
 */
#define VSX_VECTOR_STORE(name, op, indexed)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, xt);                                    \
    get_cpu_vsrl(xtl, xt);                                    \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)
d59ba583 345
#ifdef TARGET_PPC64
/*
 * VSX_VECTOR_LOAD_STORE_LENGTH - lxvl/lxvll/stxvl/stxvll.
 * The variable byte count lives in GPR[RB]; the transfer itself is done
 * in an out-of-line helper that receives a pointer to the whole VSR.
 * The EA comes from gen_addr_register (RA only, no index register).
 */
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                          \
static void gen_##name(DisasContext *ctx)                           \
{                                                                   \
    TCGv EA;                                                        \
    TCGv_ptr xt;                                                    \
                                                                    \
    if (xT(ctx->opcode) < 32) {                                     \
        if (unlikely(!ctx->vsx_enabled)) {                          \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
            return;                                                 \
        }                                                           \
    } else {                                                        \
        if (unlikely(!ctx->altivec_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_VPU);                   \
            return;                                                 \
        }                                                           \
    }                                                               \
    EA = tcg_temp_new();                                            \
    xt = gen_vsr_ptr(xT(ctx->opcode));                              \
    gen_set_access_type(ctx, ACCESS_INT);                           \
    gen_addr_register(ctx, EA);                                     \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);   \
    tcg_temp_free(EA);                                              \
    tcg_temp_free_ptr(xt);                                          \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif
378
5cb091a4
ND
/*
 * VSX_LOAD_SCALAR_DS - DS-form scalar loads (lxsd / lxssp).
 * These target the upper half of the VSR file (rD + 32), which is why
 * the enable check is the AltiVec one rather than VSX.  Only the high
 * doubleword of the target is written.
 */
#define VSX_LOAD_SCALAR_DS(name, operation)       \
static void gen_##name(DisasContext *ctx)         \
{                                                 \
    TCGv EA;                                      \
    TCGv_i64 xth;                                 \
                                                  \
    if (unlikely(!ctx->altivec_enabled)) {        \
        gen_exception(ctx, POWERPC_EXCP_VPU);     \
        return;                                   \
    }                                             \
    xth = tcg_temp_new_i64();                     \
    gen_set_access_type(ctx, ACCESS_INT);         \
    EA = tcg_temp_new();                          \
    gen_addr_imm_index(ctx, EA, 0x03);            \
    gen_qemu_##operation(ctx, xth, EA);           \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);      \
    /* NOTE: cpu_vsrl is undefined */             \
    tcg_temp_free(EA);                            \
    tcg_temp_free_i64(xth);                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
402
3014427a
BH
/*
 * VSX_STORE_SCALAR - X-form (reg + reg indexed) scalar VSX store.
 * Reads the high doubleword of VSR[XS] and stores it with the given
 * gen_qemu_* primitive (which may narrow it: st8/st16/st32/...).
 */
#define VSX_STORE_SCALAR(name, operation)         \
static void gen_##name(DisasContext *ctx)         \
{                                                 \
    TCGv EA;                                      \
    TCGv_i64 t0;                                  \
    if (unlikely(!ctx->vsx_enabled)) {            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);    \
        return;                                   \
    }                                             \
    t0 = tcg_temp_new_i64();                      \
    gen_set_access_type(ctx, ACCESS_INT);         \
    EA = tcg_temp_new();                          \
    gen_addr_reg_index(ctx, EA);                  \
    get_cpu_vsrh(t0, xS(ctx->opcode));            \
    gen_qemu_##operation(ctx, t0, EA);            \
    tcg_temp_free(EA);                            \
    tcg_temp_free_i64(t0);                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
428
429static void gen_stxvd2x(DisasContext *ctx)
430{
431 TCGv EA;
8b3b2d75 432 TCGv_i64 t0;
3014427a
BH
433 if (unlikely(!ctx->vsx_enabled)) {
434 gen_exception(ctx, POWERPC_EXCP_VSXU);
435 return;
436 }
8b3b2d75 437 t0 = tcg_temp_new_i64();
3014427a
BH
438 gen_set_access_type(ctx, ACCESS_INT);
439 EA = tcg_temp_new();
440 gen_addr_reg_index(ctx, EA);
8b3b2d75
MCA
441 get_cpu_vsrh(t0, xS(ctx->opcode));
442 gen_qemu_st64_i64(ctx, t0, EA);
3014427a 443 tcg_gen_addi_tl(EA, EA, 8);
8b3b2d75
MCA
444 get_cpu_vsrl(t0, xS(ctx->opcode));
445 gen_qemu_st64_i64(ctx, t0, EA);
3014427a 446 tcg_temp_free(EA);
8b3b2d75 447 tcg_temp_free_i64(t0);
3014427a
BH
448}
449
/*
 * stxvw4x: store VSR[XS] as four words.
 *
 * In little-endian mode each doubleword has its two 32-bit halves
 * swapped (shri + deposit) before the LE store so that the four words
 * land in memory in the architected order.  Big-endian mode is two
 * plain BE stores.
 */
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* swap the 32-bit halves of each doubleword before storing */
        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
489
/*
 * stxvh8x: store VSR[XS] as eight halfwords.  In little-endian mode
 * the halfword lanes are byte-swapped into temporaries first (the
 * source VSR must not be modified), then stored big-endian; BE mode
 * stores the halves directly.
 */
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
526
f3333ce0
ND
527static void gen_stxvb16x(DisasContext *ctx)
528{
f3333ce0 529 TCGv EA;
8b3b2d75
MCA
530 TCGv_i64 xsh;
531 TCGv_i64 xsl;
f3333ce0
ND
532
533 if (unlikely(!ctx->vsx_enabled)) {
534 gen_exception(ctx, POWERPC_EXCP_VSXU);
535 return;
536 }
8b3b2d75
MCA
537 xsh = tcg_temp_new_i64();
538 xsl = tcg_temp_new_i64();
539 get_cpu_vsrh(xsh, xS(ctx->opcode));
540 get_cpu_vsrl(xsl, xS(ctx->opcode));
f3333ce0
ND
541 gen_set_access_type(ctx, ACCESS_INT);
542 EA = tcg_temp_new();
543 gen_addr_reg_index(ctx, EA);
544 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
545 tcg_gen_addi_tl(EA, EA, 8);
546 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
547 tcg_temp_free(EA);
8b3b2d75
MCA
548 tcg_temp_free_i64(xsh);
549 tcg_temp_free_i64(xsl);
f3333ce0
ND
550}
551
e3001664
ND
/*
 * VSX_STORE_SCALAR_DS - DS-form scalar stores (stxsd / stxssp).
 * Source register is in the AltiVec half of the file (rD + 32), hence
 * the VPU enable check.  Only the high doubleword is read and stored.
 */
#define VSX_STORE_SCALAR_DS(name, operation)      \
static void gen_##name(DisasContext *ctx)         \
{                                                 \
    TCGv EA;                                      \
    TCGv_i64 xth;                                 \
                                                  \
    if (unlikely(!ctx->altivec_enabled)) {        \
        gen_exception(ctx, POWERPC_EXCP_VPU);     \
        return;                                   \
    }                                             \
    xth = tcg_temp_new_i64();                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);      \
    gen_set_access_type(ctx, ACCESS_INT);         \
    EA = tcg_temp_new();                          \
    gen_addr_imm_index(ctx, EA, 0x03);            \
    gen_qemu_##operation(ctx, xth, EA);           \
    /* NOTE: cpu_vsrl is undefined */             \
    tcg_temp_free(EA);                            \
    tcg_temp_free_i64(xth);                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
e3001664 575
8b3b2d75
MCA
576static void gen_mfvsrwz(DisasContext *ctx)
577{
578 if (xS(ctx->opcode) < 32) {
579 if (unlikely(!ctx->fpu_enabled)) {
580 gen_exception(ctx, POWERPC_EXCP_FPU);
581 return;
582 }
583 } else {
584 if (unlikely(!ctx->altivec_enabled)) {
585 gen_exception(ctx, POWERPC_EXCP_VPU);
586 return;
587 }
588 }
589 TCGv_i64 tmp = tcg_temp_new_i64();
590 TCGv_i64 xsh = tcg_temp_new_i64();
591 get_cpu_vsrh(xsh, xS(ctx->opcode));
592 tcg_gen_ext32u_i64(tmp, xsh);
593 tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
594 tcg_temp_free_i64(tmp);
595 tcg_temp_free_i64(xsh);
3014427a
BH
596}
597
8b3b2d75
MCA
598static void gen_mtvsrwa(DisasContext *ctx)
599{
600 if (xS(ctx->opcode) < 32) {
601 if (unlikely(!ctx->fpu_enabled)) {
602 gen_exception(ctx, POWERPC_EXCP_FPU);
603 return;
604 }
605 } else {
606 if (unlikely(!ctx->altivec_enabled)) {
607 gen_exception(ctx, POWERPC_EXCP_VPU);
608 return;
609 }
610 }
611 TCGv_i64 tmp = tcg_temp_new_i64();
612 TCGv_i64 xsh = tcg_temp_new_i64();
613 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
614 tcg_gen_ext32s_i64(xsh, tmp);
615 set_cpu_vsrh(xT(ctx->opcode), xsh);
616 tcg_temp_free_i64(tmp);
617 tcg_temp_free_i64(xsh);
618}
3014427a 619
8b3b2d75
MCA
620static void gen_mtvsrwz(DisasContext *ctx)
621{
622 if (xS(ctx->opcode) < 32) {
623 if (unlikely(!ctx->fpu_enabled)) {
624 gen_exception(ctx, POWERPC_EXCP_FPU);
625 return;
626 }
627 } else {
628 if (unlikely(!ctx->altivec_enabled)) {
629 gen_exception(ctx, POWERPC_EXCP_VPU);
630 return;
631 }
632 }
633 TCGv_i64 tmp = tcg_temp_new_i64();
634 TCGv_i64 xsh = tcg_temp_new_i64();
635 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
636 tcg_gen_ext32u_i64(xsh, tmp);
637 set_cpu_vsrh(xT(ctx->opcode), xsh);
638 tcg_temp_free_i64(tmp);
639 tcg_temp_free_i64(xsh);
640}
3014427a
BH
641
642#if defined(TARGET_PPC64)
8b3b2d75
MCA
643static void gen_mfvsrd(DisasContext *ctx)
644{
645 TCGv_i64 t0;
646 if (xS(ctx->opcode) < 32) {
647 if (unlikely(!ctx->fpu_enabled)) {
648 gen_exception(ctx, POWERPC_EXCP_FPU);
649 return;
650 }
651 } else {
652 if (unlikely(!ctx->altivec_enabled)) {
653 gen_exception(ctx, POWERPC_EXCP_VPU);
654 return;
655 }
656 }
657 t0 = tcg_temp_new_i64();
658 get_cpu_vsrh(t0, xS(ctx->opcode));
659 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
660 tcg_temp_free_i64(t0);
3014427a
BH
661}
662
8b3b2d75
MCA
663static void gen_mtvsrd(DisasContext *ctx)
664{
665 TCGv_i64 t0;
666 if (xS(ctx->opcode) < 32) {
667 if (unlikely(!ctx->fpu_enabled)) {
668 gen_exception(ctx, POWERPC_EXCP_FPU);
669 return;
670 }
671 } else {
672 if (unlikely(!ctx->altivec_enabled)) {
673 gen_exception(ctx, POWERPC_EXCP_VPU);
674 return;
675 }
676 }
677 t0 = tcg_temp_new_i64();
678 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
679 set_cpu_vsrh(xT(ctx->opcode), t0);
680 tcg_temp_free_i64(t0);
681}
3014427a 682
63583202
RB
683static void gen_mfvsrld(DisasContext *ctx)
684{
8b3b2d75 685 TCGv_i64 t0;
63583202
RB
686 if (xS(ctx->opcode) < 32) {
687 if (unlikely(!ctx->vsx_enabled)) {
688 gen_exception(ctx, POWERPC_EXCP_VSXU);
689 return;
690 }
691 } else {
692 if (unlikely(!ctx->altivec_enabled)) {
693 gen_exception(ctx, POWERPC_EXCP_VPU);
694 return;
695 }
696 }
8b3b2d75
MCA
697 t0 = tcg_temp_new_i64();
698 get_cpu_vsrl(t0, xS(ctx->opcode));
699 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
700 tcg_temp_free_i64(t0);
63583202
RB
701}
702
b9731075
RB
703static void gen_mtvsrdd(DisasContext *ctx)
704{
8b3b2d75 705 TCGv_i64 t0;
b9731075
RB
706 if (xT(ctx->opcode) < 32) {
707 if (unlikely(!ctx->vsx_enabled)) {
708 gen_exception(ctx, POWERPC_EXCP_VSXU);
709 return;
710 }
711 } else {
712 if (unlikely(!ctx->altivec_enabled)) {
713 gen_exception(ctx, POWERPC_EXCP_VPU);
714 return;
715 }
716 }
717
8b3b2d75 718 t0 = tcg_temp_new_i64();
b9731075 719 if (!rA(ctx->opcode)) {
8b3b2d75 720 tcg_gen_movi_i64(t0, 0);
b9731075 721 } else {
8b3b2d75 722 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
b9731075 723 }
8b3b2d75 724 set_cpu_vsrh(xT(ctx->opcode), t0);
b9731075 725
8b3b2d75
MCA
726 tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
727 set_cpu_vsrl(xT(ctx->opcode), t0);
728 tcg_temp_free_i64(t0);
b9731075
RB
729}
730
1a136cdc
RB
731static void gen_mtvsrws(DisasContext *ctx)
732{
8b3b2d75 733 TCGv_i64 t0;
1a136cdc
RB
734 if (xT(ctx->opcode) < 32) {
735 if (unlikely(!ctx->vsx_enabled)) {
736 gen_exception(ctx, POWERPC_EXCP_VSXU);
737 return;
738 }
739 } else {
740 if (unlikely(!ctx->altivec_enabled)) {
741 gen_exception(ctx, POWERPC_EXCP_VPU);
742 return;
743 }
744 }
745
8b3b2d75
MCA
746 t0 = tcg_temp_new_i64();
747 tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
1a136cdc 748 cpu_gpr[rA(ctx->opcode)], 32, 32);
8b3b2d75
MCA
749 set_cpu_vsrl(xT(ctx->opcode), t0);
750 set_cpu_vsrh(xT(ctx->opcode), t0);
751 tcg_temp_free_i64(t0);
1a136cdc
RB
752}
753
3014427a
BH
754#endif
755
/*
 * xxpermdi: build VSR[XT] from one doubleword of VSR[XA] (selected by
 * DM bit 1: 0 = high, 1 = low) and one of VSR[XB] (DM bit 0).
 *
 * When XT overlaps a source register, both selections are read into
 * temporaries before anything is written, so the second read cannot
 * observe a half already clobbered by the first write.  In the
 * non-overlapping case each half is copied through immediately.
 */
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        /* overlap: read both sources fully before writing the target */
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}
802
/* Sign-manipulation op selectors shared by the MOVE macros below. */
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
/* Sign-bit masks: one per doubleword (DP), one per word pair (SP). */
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

/*
 * VSX_SCALAR_MOVE - scalar abs/nabs/neg/copy-sign on the high
 * doubleword of VSR[XB], result into the high doubleword of VSR[XT].
 * 'op' is a compile-time constant, so the switch folds away.
 */
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 xb, sgm;                                     \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        xb = tcg_temp_new_i64();                              \
        sgm = tcg_temp_new_i64();                             \
        get_cpu_vsrh(xb, xB(ctx->opcode));                    \
        tcg_gen_movi_i64(sgm, sgn_mask);                      \
        switch (op) {                                         \
        case OP_ABS: {                                        \
            tcg_gen_andc_i64(xb, xb, sgm);                    \
            break;                                            \
        }                                                     \
        case OP_NABS: {                                       \
            tcg_gen_or_i64(xb, xb, sgm);                      \
            break;                                            \
        }                                                     \
        case OP_NEG: {                                        \
            tcg_gen_xor_i64(xb, xb, sgm);                     \
            break;                                            \
        }                                                     \
        case OP_CPSGN: {                                      \
            TCGv_i64 xa = tcg_temp_new_i64();                 \
            get_cpu_vsrh(xa, xA(ctx->opcode));                \
            tcg_gen_and_i64(xa, xa, sgm);                     \
            tcg_gen_andc_i64(xb, xb, sgm);                    \
            tcg_gen_or_i64(xb, xb, xa);                       \
            tcg_temp_free_i64(xa);                            \
            break;                                            \
        }                                                     \
        }                                                     \
        set_cpu_vsrh(xT(ctx->opcode), xb);                    \
        tcg_temp_free_i64(xb);                                \
        tcg_temp_free_i64(sgm);                               \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
854
3259dbd9
DG
/*
 * VSX_SCALAR_MOVE_QP - quad-precision abs/nabs/neg/copy-sign.
 * Operands live in the AltiVec half of the file (rD/rA/rB + 32).
 * Only the high doubleword carries the QP sign bit, so the op touches
 * xbh; the low doubleword (xbl) passes through unchanged.
 */
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    int xa;                                                   \
    int xt = rD(ctx->opcode) + 32;                            \
    int xb = rB(ctx->opcode) + 32;                            \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                         \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tmp = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xb);                                    \
    get_cpu_vsrl(xbl, xb);                                    \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        break;                                                \
    case OP_CPSGN:                                            \
        xah = tcg_temp_new_i64();                             \
        xa = rA(ctx->opcode) + 32;                            \
        get_cpu_vsrh(tmp, xa);                                \
        tcg_gen_and_i64(xah, tmp, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_temp_free_i64(xah);                               \
        break;                                                \
    }                                                         \
    set_cpu_vsrh(xt, xbh);                                    \
    set_cpu_vsrl(xt, xbl);                                    \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(sgm);                                   \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
3259dbd9 906
/*
 * VSX_VECTOR_MOVE - vector abs/nabs/neg/copy-sign applied to both
 * doublewords of VSR[XB].  sgn_mask selects DP (one sign bit per
 * doubleword) or SP (one per word) behaviour.
 */
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
    {                                                         \
        TCGv_i64 xbh, xbl, sgm;                               \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
        xbh = tcg_temp_new_i64();                             \
        xbl = tcg_temp_new_i64();                             \
        sgm = tcg_temp_new_i64();                             \
        get_cpu_vsrh(xbh, xB(ctx->opcode));                   \
        get_cpu_vsrl(xbl, xB(ctx->opcode));                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                      \
        switch (op) {                                         \
        case OP_ABS: {                                        \
            tcg_gen_andc_i64(xbh, xbh, sgm);                  \
            tcg_gen_andc_i64(xbl, xbl, sgm);                  \
            break;                                            \
        }                                                     \
        case OP_NABS: {                                       \
            tcg_gen_or_i64(xbh, xbh, sgm);                    \
            tcg_gen_or_i64(xbl, xbl, sgm);                    \
            break;                                            \
        }                                                     \
        case OP_NEG: {                                        \
            tcg_gen_xor_i64(xbh, xbh, sgm);                   \
            tcg_gen_xor_i64(xbl, xbl, sgm);                   \
            break;                                            \
        }                                                     \
        case OP_CPSGN: {                                      \
            TCGv_i64 xah = tcg_temp_new_i64();                \
            TCGv_i64 xal = tcg_temp_new_i64();                \
            get_cpu_vsrh(xah, xA(ctx->opcode));               \
            get_cpu_vsrl(xal, xA(ctx->opcode));               \
            tcg_gen_and_i64(xah, xah, sgm);                   \
            tcg_gen_and_i64(xal, xal, sgm);                   \
            tcg_gen_andc_i64(xbh, xbh, sgm);                  \
            tcg_gen_andc_i64(xbl, xbl, sgm);                  \
            tcg_gen_or_i64(xbh, xbh, xah);                    \
            tcg_gen_or_i64(xbl, xbl, xal);                    \
            tcg_temp_free_i64(xah);                           \
            tcg_temp_free_i64(xal);                           \
            break;                                            \
        }                                                     \
        }                                                     \
        set_cpu_vsrh(xT(ctx->opcode), xbh);                   \
        set_cpu_vsrl(xT(ctx->opcode), xbl);                   \
        tcg_temp_free_i64(xbh);                               \
        tcg_temp_free_i64(xbl);                               \
        tcg_temp_free_i64(sgm);                               \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
968
00084a25
MCA
/*
 * VSX_CMP - vector FP compares.  The helper returns the CR6 predicate
 * value; when the Rc-style record bit (bit 21 of the opcode) is set it
 * is written to CR6, otherwise the result is computed and discarded
 * (the helper still updates the target VSR and FP status).
 */
#define VSX_CMP(name, op1, op2, inval, type)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 ignored;                                         \
    TCGv_ptr xt, xa, xb;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);   \
    } else {                                                  \
        ignored = tcg_temp_new_i32();                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);      \
        tcg_temp_free_i32(ignored);                           \
    }                                                         \
    gen_helper_float_check_status(cpu_env);                   \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
1002
e0d6a362
MCA
1003static void gen_xscvqpdp(DisasContext *ctx)
1004{
1005 TCGv_i32 opc;
1006 TCGv_ptr xt, xb;
1007 if (unlikely(!ctx->vsx_enabled)) {
1008 gen_exception(ctx, POWERPC_EXCP_VSXU);
1009 return;
1010 }
1011 opc = tcg_const_i32(ctx->opcode);
1012 xt = gen_vsr_ptr(xT(ctx->opcode));
1013 xb = gen_vsr_ptr(xB(ctx->opcode));
1014 gen_helper_xscvqpdp(cpu_env, opc, xt, xb);
1015 tcg_temp_free_i32(opc);
1016 tcg_temp_free_ptr(xt);
1017 tcg_temp_free_ptr(xb);
1018}
1019
/*
 * GEN_VSX_HELPER_2 - trampoline into a helper that takes only the raw
 * opcode and decodes its own operands from it.
 */
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}
1032
99125c74
MCA
/*
 * GEN_VSX_HELPER_X3 - helper taking VSR pointers for XT, XA and XB
 * (three-operand XX3-form ops).
 */
#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_ptr xt, xa, xb;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                   \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xa);                                    \
    tcg_temp_free_ptr(xb);                                    \
}
1049
75cf84cb
MCA
/*
 * GEN_VSX_HELPER_X2 - helper taking VSR pointers for XT and XB
 * (two-operand XX2-form ops).
 */
#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)        \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_ptr xt, xb;                                          \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                        \
    gen_helper_##name(cpu_env, xt, xb);                       \
    tcg_temp_free_ptr(xt);                                    \
    tcg_temp_free_ptr(xb);                                    \
}
1064
/*
 * Emit a call helper_name(env, opcode, xa, xb) for compare/test ops
 * that write CR/FPSCR rather than a target VSR; the raw opcode lets
 * the helper find the BF field.
 */
#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_i32 opc;                                                     \
    TCGv_ptr xa, xb;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    opc = tcg_const_i32(ctx->opcode);                                 \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                \
    gen_helper_##name(cpu_env, opc, xa, xb);                          \
    tcg_temp_free_i32(opc);                                           \
    tcg_temp_free_ptr(xa);                                            \
    tcg_temp_free_ptr(xb);                                            \
}
1082
/* Emit a call helper_name(env, opcode, xb) for single-source test ops. */
#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_i32 opc;                                                     \
    TCGv_ptr xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    opc = tcg_const_i32(ctx->opcode);                                 \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                \
    gen_helper_##name(cpu_env, opc, xb);                              \
    tcg_temp_free_i32(opc);                                           \
    tcg_temp_free_ptr(xb);                                            \
}
1098
/*
 * Emit a call helper_name(env, opcode, xt, xa, xb) for quad-precision
 * ops; the "+ 32" maps the VRT/VRA/VRB fields into the upper half of
 * the VSR file (VSRs 32..63 overlay the Vector Registers).
 */
#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_i32 opc;                                                     \
    TCGv_ptr xt, xa, xb;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    opc = tcg_const_i32(ctx->opcode);                                 \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                           \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                           \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                           \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                      \
    tcg_temp_free_i32(opc);                                           \
    tcg_temp_free_ptr(xt);                                            \
    tcg_temp_free_ptr(xa);                                            \
    tcg_temp_free_ptr(xb);                                            \
}
1118
/*
 * Emit a call helper_name(env, opcode, xt, xb) for two-operand
 * quad-precision ops addressing VSRs 32..63 (the Vector Registers).
 */
#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_i32 opc;                                                     \
    TCGv_ptr xt, xb;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    opc = tcg_const_i32(ctx->opcode);                                 \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                           \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                           \
    gen_helper_##name(cpu_env, opc, xt, xb);                          \
    tcg_temp_free_i32(opc);                                           \
    tcg_temp_free_ptr(xt);                                            \
    tcg_temp_free_ptr(xb);                                            \
}
1136
/*
 * Emit a call helper_name(env, opcode, xa, xb) for quad-precision
 * compares; sources come from VSRs 32..63, the result goes to CR/FPSCR.
 */
#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_i32 opc;                                                     \
    TCGv_ptr xa, xb;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    opc = tcg_const_i32(ctx->opcode);                                 \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                           \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                           \
    gen_helper_##name(cpu_env, opc, xa, xb);                          \
    tcg_temp_free_i32(opc);                                           \
    tcg_temp_free_ptr(xa);                                            \
    tcg_temp_free_ptr(xb);                                            \
}
1154
/*
 * Emit out = helper_name(env, in) where "in" is the high doubleword of
 * VSR[XB] and "out" is written to the high doubleword of VSR[XT].  The
 * low doubleword of XT is left untouched (architecturally undefined).
 */
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_i64 in, out;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    in = tcg_temp_new_i64();                                          \
    out = tcg_temp_new_i64();                                         \
    get_cpu_vsrh(in, xB(ctx->opcode));                                \
    gen_helper_##name(out, cpu_env, in);                              \
    set_cpu_vsrh(xT(ctx->opcode), out);                               \
    tcg_temp_free_i64(in);                                            \
    tcg_temp_free_i64(out);                                           \
}
1172
99125c74 1173GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
23d0766b 1174GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
99125c74
MCA
1175GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
1176GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
23d0766b 1177GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
99125c74 1178GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
23d0766b 1179GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
75cf84cb
MCA
1180GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
1181GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
1182GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
033e1fcd 1183GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
8d830485 1184GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
99125c74
MCA
1185GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
1186GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
1187GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
1188GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
033e1fcd 1189GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
6ae4a57a 1190GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
033e1fcd
MCA
1191GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
1192GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
6ae4a57a
MCA
1193GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
1194GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
99125c74
MCA
1195GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
1196GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
23d0766b
MCA
1197GEN_VSX_HELPER_R3(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
1198GEN_VSX_HELPER_R3(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
1199GEN_VSX_HELPER_R3(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
1200GEN_VSX_HELPER_R3(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
75cf84cb
MCA
1201GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
1202GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
99229620 1203GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
3014427a 1204GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
99229620
MCA
1205GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
1206GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
1207GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
1208GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
75cf84cb 1209GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
99229620 1210GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
75cf84cb 1211GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
3014427a 1212GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
75cf84cb
MCA
1213GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1214GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1215GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1216GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1217GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
99229620 1218GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
75cf84cb
MCA
1219GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1220GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1221GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1222GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1223GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1224GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
3014427a 1225GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
99229620
MCA
1226GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1227GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1228GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
23d0766b 1229GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
99125c74
MCA
1230GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
1231GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
1232GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
1233GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
75cf84cb
MCA
1234GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
1235GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
1236GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
75cf84cb
MCA
1237GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
1238GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
8d830485 1239GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
78241762
ND
1240GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
1241GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)
3014427a 1242
99125c74
MCA
1243GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
1244GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
1245GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
1246GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
75cf84cb
MCA
1247GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
1248GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
1249GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
033e1fcd 1250GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
8d830485 1251GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
99125c74
MCA
1252GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
1253GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
75cf84cb
MCA
1254GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
1255GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
1256GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
1257GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
1258GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
1259GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
1260GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
1261GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
1262GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
1263GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
1264GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
1265GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
1266GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
1267GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
3014427a 1268
99125c74
MCA
1269GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
1270GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
1271GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
1272GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
75cf84cb
MCA
1273GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
1274GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
1275GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
033e1fcd 1276GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
8d830485 1277GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
99125c74
MCA
1278GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
1279GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
75cf84cb
MCA
1280GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
1281GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
1282GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
1283GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
1284GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
1285GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
1286GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
1287GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
1288GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
1289GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
1290GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
1291GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
1292GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
1293GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
1294GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
1295GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
403a884a
ND
1296GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
1297GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
99125c74
MCA
1298GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
1299GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
3014427a 1300
/*
 * Fused multiply-add generator.  Opcode bit 25 selects the "mdouble"
 * form: when set the operation is A*T + B, otherwise A*B + T.  The
 * helper signature is helper_name(env, xt, xa, multiplier, addend).
 */
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)     \
static void gen_##name(DisasContext *ctx)                             \
{                                                                     \
    TCGv_ptr xt, xa, b, c;                                            \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                \
    if (ctx->opcode & PPC_BIT(25)) {                                  \
        /* A*T + B */                                                 \
        b = gen_vsr_ptr(xT(ctx->opcode));                             \
        c = gen_vsr_ptr(xB(ctx->opcode));                             \
    } else {                                                          \
        /* A*B + T */                                                 \
        b = gen_vsr_ptr(xB(ctx->opcode));                             \
        c = gen_vsr_ptr(xT(ctx->opcode));                             \
    }                                                                 \
    gen_helper_##name(cpu_env, xt, xa, b, c);                         \
    tcg_temp_free_ptr(xt);                                            \
    tcg_temp_free_ptr(xa);                                            \
    tcg_temp_free_ptr(b);                                             \
    tcg_temp_free_ptr(c);                                             \
}
1330
1331GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
1332GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
1333GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
1334GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
1335GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
1336GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
1337GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
1338GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
1339GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
1340GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
1341GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
1342GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
1343GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
1344GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
1345GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
1346GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
1347
14fd8ab2
ND
1348static void gen_xxbrd(DisasContext *ctx)
1349{
8b3b2d75
MCA
1350 TCGv_i64 xth;
1351 TCGv_i64 xtl;
1352 TCGv_i64 xbh;
1353 TCGv_i64 xbl;
14fd8ab2
ND
1354
1355 if (unlikely(!ctx->vsx_enabled)) {
1356 gen_exception(ctx, POWERPC_EXCP_VSXU);
1357 return;
1358 }
8b3b2d75
MCA
1359 xth = tcg_temp_new_i64();
1360 xtl = tcg_temp_new_i64();
1361 xbh = tcg_temp_new_i64();
1362 xbl = tcg_temp_new_i64();
1363 get_cpu_vsrh(xbh, xB(ctx->opcode));
1364 get_cpu_vsrl(xbl, xB(ctx->opcode));
1365
14fd8ab2
ND
1366 tcg_gen_bswap64_i64(xth, xbh);
1367 tcg_gen_bswap64_i64(xtl, xbl);
8b3b2d75
MCA
1368 set_cpu_vsrh(xT(ctx->opcode), xth);
1369 set_cpu_vsrl(xT(ctx->opcode), xtl);
1370
1371 tcg_temp_free_i64(xth);
1372 tcg_temp_free_i64(xtl);
1373 tcg_temp_free_i64(xbh);
1374 tcg_temp_free_i64(xbl);
14fd8ab2
ND
1375}
1376
1377static void gen_xxbrh(DisasContext *ctx)
1378{
8b3b2d75
MCA
1379 TCGv_i64 xth;
1380 TCGv_i64 xtl;
1381 TCGv_i64 xbh;
1382 TCGv_i64 xbl;
14fd8ab2
ND
1383
1384 if (unlikely(!ctx->vsx_enabled)) {
1385 gen_exception(ctx, POWERPC_EXCP_VSXU);
1386 return;
1387 }
8b3b2d75
MCA
1388 xth = tcg_temp_new_i64();
1389 xtl = tcg_temp_new_i64();
1390 xbh = tcg_temp_new_i64();
1391 xbl = tcg_temp_new_i64();
1392 get_cpu_vsrh(xbh, xB(ctx->opcode));
1393 get_cpu_vsrl(xbl, xB(ctx->opcode));
1394
14fd8ab2 1395 gen_bswap16x8(xth, xtl, xbh, xbl);
8b3b2d75
MCA
1396 set_cpu_vsrh(xT(ctx->opcode), xth);
1397 set_cpu_vsrl(xT(ctx->opcode), xtl);
1398
1399 tcg_temp_free_i64(xth);
1400 tcg_temp_free_i64(xtl);
1401 tcg_temp_free_i64(xbh);
1402 tcg_temp_free_i64(xbl);
14fd8ab2
ND
1403}
1404
1405static void gen_xxbrq(DisasContext *ctx)
1406{
8b3b2d75
MCA
1407 TCGv_i64 xth;
1408 TCGv_i64 xtl;
1409 TCGv_i64 xbh;
1410 TCGv_i64 xbl;
1411 TCGv_i64 t0;
14fd8ab2
ND
1412
1413 if (unlikely(!ctx->vsx_enabled)) {
1414 gen_exception(ctx, POWERPC_EXCP_VSXU);
1415 return;
1416 }
8b3b2d75
MCA
1417 xth = tcg_temp_new_i64();
1418 xtl = tcg_temp_new_i64();
1419 xbh = tcg_temp_new_i64();
1420 xbl = tcg_temp_new_i64();
1421 get_cpu_vsrh(xbh, xB(ctx->opcode));
1422 get_cpu_vsrl(xbl, xB(ctx->opcode));
1423 t0 = tcg_temp_new_i64();
1424
14fd8ab2
ND
1425 tcg_gen_bswap64_i64(t0, xbl);
1426 tcg_gen_bswap64_i64(xtl, xbh);
8b3b2d75 1427 set_cpu_vsrl(xT(ctx->opcode), xtl);
14fd8ab2 1428 tcg_gen_mov_i64(xth, t0);
d47a751a 1429 set_cpu_vsrh(xT(ctx->opcode), xth);
8b3b2d75 1430
14fd8ab2 1431 tcg_temp_free_i64(t0);
8b3b2d75
MCA
1432 tcg_temp_free_i64(xth);
1433 tcg_temp_free_i64(xtl);
1434 tcg_temp_free_i64(xbh);
1435 tcg_temp_free_i64(xbl);
14fd8ab2
ND
1436}
1437
1438static void gen_xxbrw(DisasContext *ctx)
1439{
8b3b2d75
MCA
1440 TCGv_i64 xth;
1441 TCGv_i64 xtl;
1442 TCGv_i64 xbh;
1443 TCGv_i64 xbl;
14fd8ab2
ND
1444
1445 if (unlikely(!ctx->vsx_enabled)) {
1446 gen_exception(ctx, POWERPC_EXCP_VSXU);
1447 return;
1448 }
8b3b2d75
MCA
1449 xth = tcg_temp_new_i64();
1450 xtl = tcg_temp_new_i64();
1451 xbh = tcg_temp_new_i64();
1452 xbl = tcg_temp_new_i64();
1453 get_cpu_vsrh(xbh, xB(ctx->opcode));
1454 get_cpu_vsrl(xbl, xB(ctx->opcode));
1455
14fd8ab2 1456 gen_bswap32x4(xth, xtl, xbh, xbl);
d47a751a 1457 set_cpu_vsrh(xT(ctx->opcode), xth);
8b3b2d75
MCA
1458 set_cpu_vsrl(xT(ctx->opcode), xtl);
1459
1460 tcg_temp_free_i64(xth);
1461 tcg_temp_free_i64(xtl);
1462 tcg_temp_free_i64(xbh);
1463 tcg_temp_free_i64(xbl);
14fd8ab2
ND
1464}
1465
/* 128-bit bitwise logical ops, expanded with the gvec infrastructure. */
#define VSX_LOGICAL(name, vece, tcg_op)                               \
static void glue(gen_, name)(DisasContext *ctx)                       \
{                                                                     \
    if (unlikely(!ctx->vsx_enabled)) {                                \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                        \
        return;                                                       \
    }                                                                 \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),                    \
           vsr_full_offset(xA(ctx->opcode)),                          \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);                 \
}
1477
7b8fe477
RH
1478VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
1479VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
1480VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
1481VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
1482VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
1483VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
1484VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
1485VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
3014427a
BH
1486
1487#define VSX_XXMRG(name, high) \
34b2300c 1488static void glue(gen_, name)(DisasContext *ctx) \
3014427a 1489 { \
8b3b2d75 1490 TCGv_i64 a0, a1, b0, b1, tmp; \
3014427a
BH
1491 if (unlikely(!ctx->vsx_enabled)) { \
1492 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1493 return; \
1494 } \
1495 a0 = tcg_temp_new_i64(); \
1496 a1 = tcg_temp_new_i64(); \
1497 b0 = tcg_temp_new_i64(); \
1498 b1 = tcg_temp_new_i64(); \
8b3b2d75 1499 tmp = tcg_temp_new_i64(); \
3014427a 1500 if (high) { \
8b3b2d75
MCA
1501 get_cpu_vsrh(a0, xA(ctx->opcode)); \
1502 get_cpu_vsrh(a1, xA(ctx->opcode)); \
1503 get_cpu_vsrh(b0, xB(ctx->opcode)); \
1504 get_cpu_vsrh(b1, xB(ctx->opcode)); \
3014427a 1505 } else { \
8b3b2d75
MCA
1506 get_cpu_vsrl(a0, xA(ctx->opcode)); \
1507 get_cpu_vsrl(a1, xA(ctx->opcode)); \
1508 get_cpu_vsrl(b0, xB(ctx->opcode)); \
1509 get_cpu_vsrl(b1, xB(ctx->opcode)); \
3014427a
BH
1510 } \
1511 tcg_gen_shri_i64(a0, a0, 32); \
1512 tcg_gen_shri_i64(b0, b0, 32); \
8b3b2d75
MCA
1513 tcg_gen_deposit_i64(tmp, b0, a0, 32, 32); \
1514 set_cpu_vsrh(xT(ctx->opcode), tmp); \
1515 tcg_gen_deposit_i64(tmp, b1, a1, 32, 32); \
1516 set_cpu_vsrl(xT(ctx->opcode), tmp); \
3014427a
BH
1517 tcg_temp_free_i64(a0); \
1518 tcg_temp_free_i64(a1); \
1519 tcg_temp_free_i64(b0); \
1520 tcg_temp_free_i64(b1); \
8b3b2d75 1521 tcg_temp_free_i64(tmp); \
3014427a
BH
1522 }
1523
1524VSX_XXMRG(xxmrghw, 1)
1525VSX_XXMRG(xxmrglw, 0)
1526
03dce230
RH
1527static void gen_xxsel(DisasContext *ctx)
1528{
03dce230
RH
1529 int rt = xT(ctx->opcode);
1530 int ra = xA(ctx->opcode);
1531 int rb = xB(ctx->opcode);
1532 int rc = xC(ctx->opcode);
3014427a 1533
03dce230
RH
1534 if (unlikely(!ctx->vsx_enabled)) {
1535 gen_exception(ctx, POWERPC_EXCP_VSXU);
1536 return;
1537 }
fe2d1696
RH
1538 tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
1539 vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
3014427a
BH
1540}
1541
1542static void gen_xxspltw(DisasContext *ctx)
1543{
9bb0048e
RH
1544 int rt = xT(ctx->opcode);
1545 int rb = xB(ctx->opcode);
1546 int uim = UIM(ctx->opcode);
1547 int tofs, bofs;
3014427a
BH
1548
1549 if (unlikely(!ctx->vsx_enabled)) {
1550 gen_exception(ctx, POWERPC_EXCP_VSXU);
1551 return;
1552 }
1553
9bb0048e
RH
1554 tofs = vsr_full_offset(rt);
1555 bofs = vsr_full_offset(rb);
1556 bofs += uim << MO_32;
1557#ifndef HOST_WORDS_BIG_ENDIAN
1558 bofs ^= 8 | 4;
1559#endif
3014427a 1560
9bb0048e 1561 tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
3014427a
BH
1562}
1563
f1132835
ND
1564#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
1565
1566static void gen_xxspltib(DisasContext *ctx)
1567{
cf95e701
RH
1568 uint8_t uim8 = IMM8(ctx->opcode);
1569 int rt = xT(ctx->opcode);
1570
1571 if (rt < 32) {
4c406ca7
AB
1572 if (unlikely(!ctx->vsx_enabled)) {
1573 gen_exception(ctx, POWERPC_EXCP_VSXU);
f1132835
ND
1574 return;
1575 }
1576 } else {
4c406ca7
AB
1577 if (unlikely(!ctx->altivec_enabled)) {
1578 gen_exception(ctx, POWERPC_EXCP_VPU);
f1132835
ND
1579 return;
1580 }
1581 }
cf95e701 1582 tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
f1132835
ND
1583}
1584
3014427a
BH
1585static void gen_xxsldwi(DisasContext *ctx)
1586{
1587 TCGv_i64 xth, xtl;
1588 if (unlikely(!ctx->vsx_enabled)) {
1589 gen_exception(ctx, POWERPC_EXCP_VSXU);
1590 return;
1591 }
1592 xth = tcg_temp_new_i64();
1593 xtl = tcg_temp_new_i64();
1594
1595 switch (SHW(ctx->opcode)) {
1596 case 0: {
8b3b2d75
MCA
1597 get_cpu_vsrh(xth, xA(ctx->opcode));
1598 get_cpu_vsrl(xtl, xA(ctx->opcode));
3014427a
BH
1599 break;
1600 }
1601 case 1: {
1602 TCGv_i64 t0 = tcg_temp_new_i64();
8b3b2d75 1603 get_cpu_vsrh(xth, xA(ctx->opcode));
3014427a 1604 tcg_gen_shli_i64(xth, xth, 32);
8b3b2d75 1605 get_cpu_vsrl(t0, xA(ctx->opcode));
3014427a
BH
1606 tcg_gen_shri_i64(t0, t0, 32);
1607 tcg_gen_or_i64(xth, xth, t0);
8b3b2d75 1608 get_cpu_vsrl(xtl, xA(ctx->opcode));
3014427a 1609 tcg_gen_shli_i64(xtl, xtl, 32);
8b3b2d75 1610 get_cpu_vsrh(t0, xB(ctx->opcode));
3014427a
BH
1611 tcg_gen_shri_i64(t0, t0, 32);
1612 tcg_gen_or_i64(xtl, xtl, t0);
1613 tcg_temp_free_i64(t0);
1614 break;
1615 }
1616 case 2: {
8b3b2d75
MCA
1617 get_cpu_vsrl(xth, xA(ctx->opcode));
1618 get_cpu_vsrh(xtl, xB(ctx->opcode));
3014427a
BH
1619 break;
1620 }
1621 case 3: {
1622 TCGv_i64 t0 = tcg_temp_new_i64();
8b3b2d75 1623 get_cpu_vsrl(xth, xA(ctx->opcode));
3014427a 1624 tcg_gen_shli_i64(xth, xth, 32);
8b3b2d75 1625 get_cpu_vsrh(t0, xB(ctx->opcode));
3014427a
BH
1626 tcg_gen_shri_i64(t0, t0, 32);
1627 tcg_gen_or_i64(xth, xth, t0);
8b3b2d75 1628 get_cpu_vsrh(xtl, xB(ctx->opcode));
3014427a 1629 tcg_gen_shli_i64(xtl, xtl, 32);
8b3b2d75 1630 get_cpu_vsrl(t0, xB(ctx->opcode));
3014427a
BH
1631 tcg_gen_shri_i64(t0, t0, 32);
1632 tcg_gen_or_i64(xtl, xtl, t0);
1633 tcg_temp_free_i64(t0);
1634 break;
1635 }
1636 }
1637
8b3b2d75
MCA
1638 set_cpu_vsrh(xT(ctx->opcode), xth);
1639 set_cpu_vsrl(xT(ctx->opcode), xtl);
3014427a
BH
1640
1641 tcg_temp_free_i64(xth);
1642 tcg_temp_free_i64(xtl);
1643}
1644
3398b742 1645#define VSX_EXTRACT_INSERT(name) \
8ad901e5
ND
1646static void gen_##name(DisasContext *ctx) \
1647{ \
5ba5335d 1648 TCGv_ptr xt, xb; \
8b3b2d75
MCA
1649 TCGv_i32 t0; \
1650 TCGv_i64 t1; \
8ad901e5
ND
1651 uint8_t uimm = UIMM4(ctx->opcode); \
1652 \
1653 if (unlikely(!ctx->vsx_enabled)) { \
1654 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1655 return; \
1656 } \
5ba5335d
MCA
1657 xt = gen_vsr_ptr(xT(ctx->opcode)); \
1658 xb = gen_vsr_ptr(xB(ctx->opcode)); \
8b3b2d75
MCA
1659 t0 = tcg_temp_new_i32(); \
1660 t1 = tcg_temp_new_i64(); \
34b2300c
DG
1661 /* \
1662 * uimm > 15 out of bound and for \
8ad901e5
ND
1663 * uimm > 12 handle as per hardware in helper \
1664 */ \
1665 if (uimm > 15) { \
8b3b2d75
MCA
1666 tcg_gen_movi_i64(t1, 0); \
1667 set_cpu_vsrh(xT(ctx->opcode), t1); \
1668 set_cpu_vsrl(xT(ctx->opcode), t1); \
8ad901e5
ND
1669 return; \
1670 } \
1671 tcg_gen_movi_i32(t0, uimm); \
1672 gen_helper_##name(cpu_env, xt, xb, t0); \
5ba5335d
MCA
1673 tcg_temp_free_ptr(xb); \
1674 tcg_temp_free_ptr(xt); \
8ad901e5 1675 tcg_temp_free_i32(t0); \
8b3b2d75 1676 tcg_temp_free_i64(t1); \
8ad901e5
ND
1677}
1678
3398b742
ND
1679VSX_EXTRACT_INSERT(xxextractuw)
1680VSX_EXTRACT_INSERT(xxinsertw)
8ad901e5 1681
08e14986
ND
1682#ifdef TARGET_PPC64
1683static void gen_xsxexpdp(DisasContext *ctx)
1684{
1685 TCGv rt = cpu_gpr[rD(ctx->opcode)];
8b3b2d75 1686 TCGv_i64 t0;
08e14986
ND
1687 if (unlikely(!ctx->vsx_enabled)) {
1688 gen_exception(ctx, POWERPC_EXCP_VSXU);
1689 return;
1690 }
8b3b2d75
MCA
1691 t0 = tcg_temp_new_i64();
1692 get_cpu_vsrh(t0, xB(ctx->opcode));
1693 tcg_gen_extract_i64(rt, t0, 52, 11);
1694 tcg_temp_free_i64(t0);
08e14986 1695}
9eceae32
ND
1696
1697static void gen_xsxexpqp(DisasContext *ctx)
1698{
8b3b2d75
MCA
1699 TCGv_i64 xth;
1700 TCGv_i64 xtl;
1701 TCGv_i64 xbh;
9eceae32
ND
1702
1703 if (unlikely(!ctx->vsx_enabled)) {
1704 gen_exception(ctx, POWERPC_EXCP_VSXU);
1705 return;
1706 }
8b3b2d75
MCA
1707 xth = tcg_temp_new_i64();
1708 xtl = tcg_temp_new_i64();
1709 xbh = tcg_temp_new_i64();
1710 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1711
e2622073 1712 tcg_gen_extract_i64(xth, xbh, 48, 15);
8b3b2d75 1713 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
9eceae32 1714 tcg_gen_movi_i64(xtl, 0);
8b3b2d75
MCA
1715 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1716
1717 tcg_temp_free_i64(xbh);
1718 tcg_temp_free_i64(xth);
1719 tcg_temp_free_i64(xtl);
9eceae32 1720}
05538220 1721
1b8d663d
ND
1722static void gen_xsiexpdp(DisasContext *ctx)
1723{
8b3b2d75 1724 TCGv_i64 xth;
1b8d663d
ND
1725 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1726 TCGv rb = cpu_gpr[rB(ctx->opcode)];
1727 TCGv_i64 t0;
1728
1729 if (unlikely(!ctx->vsx_enabled)) {
1730 gen_exception(ctx, POWERPC_EXCP_VSXU);
1731 return;
1732 }
1733 t0 = tcg_temp_new_i64();
8b3b2d75 1734 xth = tcg_temp_new_i64();
1b8d663d
ND
1735 tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1736 tcg_gen_andi_i64(t0, rb, 0x7FF);
1737 tcg_gen_shli_i64(t0, t0, 52);
1738 tcg_gen_or_i64(xth, xth, t0);
8b3b2d75 1739 set_cpu_vsrh(xT(ctx->opcode), xth);
1b8d663d
ND
1740 /* dword[1] is undefined */
1741 tcg_temp_free_i64(t0);
8b3b2d75 1742 tcg_temp_free_i64(xth);
1b8d663d
ND
1743}
1744
8a9472ec
ND
1745static void gen_xsiexpqp(DisasContext *ctx)
1746{
8b3b2d75
MCA
1747 TCGv_i64 xth;
1748 TCGv_i64 xtl;
1749 TCGv_i64 xah;
1750 TCGv_i64 xal;
1751 TCGv_i64 xbh;
8a9472ec
ND
1752 TCGv_i64 t0;
1753
1754 if (unlikely(!ctx->vsx_enabled)) {
1755 gen_exception(ctx, POWERPC_EXCP_VSXU);
1756 return;
1757 }
8b3b2d75
MCA
1758 xth = tcg_temp_new_i64();
1759 xtl = tcg_temp_new_i64();
1760 xah = tcg_temp_new_i64();
1761 xal = tcg_temp_new_i64();
1762 get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
1763 get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
1764 xbh = tcg_temp_new_i64();
1765 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
8a9472ec 1766 t0 = tcg_temp_new_i64();
8b3b2d75 1767
8a9472ec
ND
1768 tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1769 tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1770 tcg_gen_shli_i64(t0, t0, 48);
1771 tcg_gen_or_i64(xth, xth, t0);
8b3b2d75 1772 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
8a9472ec 1773 tcg_gen_mov_i64(xtl, xal);
8b3b2d75
MCA
1774 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1775
8a9472ec 1776 tcg_temp_free_i64(t0);
8b3b2d75
MCA
1777 tcg_temp_free_i64(xth);
1778 tcg_temp_free_i64(xtl);
1779 tcg_temp_free_i64(xah);
1780 tcg_temp_free_i64(xal);
1781 tcg_temp_free_i64(xbh);
8a9472ec
ND
1782}
1783
05538220
ND
1784static void gen_xsxsigdp(DisasContext *ctx)
1785{
1786 TCGv rt = cpu_gpr[rD(ctx->opcode)];
8b3b2d75 1787 TCGv_i64 t0, t1, zr, nan, exp;
05538220
ND
1788
1789 if (unlikely(!ctx->vsx_enabled)) {
1790 gen_exception(ctx, POWERPC_EXCP_VSXU);
1791 return;
1792 }
1793 exp = tcg_temp_new_i64();
1794 t0 = tcg_temp_new_i64();
8b3b2d75 1795 t1 = tcg_temp_new_i64();
05538220
ND
1796 zr = tcg_const_i64(0);
1797 nan = tcg_const_i64(2047);
1798
8b3b2d75
MCA
1799 get_cpu_vsrh(t1, xB(ctx->opcode));
1800 tcg_gen_extract_i64(exp, t1, 52, 11);
05538220
ND
1801 tcg_gen_movi_i64(t0, 0x0010000000000000);
1802 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1803 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
8b3b2d75 1804 get_cpu_vsrh(t1, xB(ctx->opcode));
dd977e4f 1805 tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
05538220
ND
1806
1807 tcg_temp_free_i64(t0);
8b3b2d75 1808 tcg_temp_free_i64(t1);
05538220
ND
1809 tcg_temp_free_i64(exp);
1810 tcg_temp_free_i64(zr);
1811 tcg_temp_free_i64(nan);
1812}
1813
29f8ddb7
ND
1814static void gen_xsxsigqp(DisasContext *ctx)
1815{
1816 TCGv_i64 t0, zr, nan, exp;
8b3b2d75
MCA
1817 TCGv_i64 xth;
1818 TCGv_i64 xtl;
1819 TCGv_i64 xbh;
1820 TCGv_i64 xbl;
29f8ddb7
ND
1821
1822 if (unlikely(!ctx->vsx_enabled)) {
1823 gen_exception(ctx, POWERPC_EXCP_VSXU);
1824 return;
1825 }
8b3b2d75
MCA
1826 xth = tcg_temp_new_i64();
1827 xtl = tcg_temp_new_i64();
1828 xbh = tcg_temp_new_i64();
1829 xbl = tcg_temp_new_i64();
1830 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1831 get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
29f8ddb7
ND
1832 exp = tcg_temp_new_i64();
1833 t0 = tcg_temp_new_i64();
1834 zr = tcg_const_i64(0);
1835 nan = tcg_const_i64(32767);
1836
8b3b2d75 1837 tcg_gen_extract_i64(exp, xbh, 48, 15);
29f8ddb7
ND
1838 tcg_gen_movi_i64(t0, 0x0001000000000000);
1839 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1840 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
dd977e4f 1841 tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
8b3b2d75
MCA
1842 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1843 tcg_gen_mov_i64(xtl, xbl);
1844 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
29f8ddb7
ND
1845
1846 tcg_temp_free_i64(t0);
1847 tcg_temp_free_i64(exp);
1848 tcg_temp_free_i64(zr);
1849 tcg_temp_free_i64(nan);
8b3b2d75
MCA
1850 tcg_temp_free_i64(xth);
1851 tcg_temp_free_i64(xtl);
1852 tcg_temp_free_i64(xbh);
1853 tcg_temp_free_i64(xbl);
29f8ddb7 1854}
08e14986
ND
1855#endif
1856
d9031405
ND
1857static void gen_xviexpsp(DisasContext *ctx)
1858{
8b3b2d75
MCA
1859 TCGv_i64 xth;
1860 TCGv_i64 xtl;
1861 TCGv_i64 xah;
1862 TCGv_i64 xal;
1863 TCGv_i64 xbh;
1864 TCGv_i64 xbl;
d9031405
ND
1865 TCGv_i64 t0;
1866
1867 if (unlikely(!ctx->vsx_enabled)) {
1868 gen_exception(ctx, POWERPC_EXCP_VSXU);
1869 return;
1870 }
8b3b2d75
MCA
1871 xth = tcg_temp_new_i64();
1872 xtl = tcg_temp_new_i64();
1873 xah = tcg_temp_new_i64();
1874 xal = tcg_temp_new_i64();
1875 xbh = tcg_temp_new_i64();
1876 xbl = tcg_temp_new_i64();
1877 get_cpu_vsrh(xah, xA(ctx->opcode));
1878 get_cpu_vsrl(xal, xA(ctx->opcode));
1879 get_cpu_vsrh(xbh, xB(ctx->opcode));
1880 get_cpu_vsrl(xbl, xB(ctx->opcode));
d9031405 1881 t0 = tcg_temp_new_i64();
8b3b2d75 1882
d9031405
ND
1883 tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
1884 tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
1885 tcg_gen_shli_i64(t0, t0, 23);
1886 tcg_gen_or_i64(xth, xth, t0);
8b3b2d75 1887 set_cpu_vsrh(xT(ctx->opcode), xth);
d9031405
ND
1888 tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
1889 tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
1890 tcg_gen_shli_i64(t0, t0, 23);
1891 tcg_gen_or_i64(xtl, xtl, t0);
8b3b2d75
MCA
1892 set_cpu_vsrl(xT(ctx->opcode), xtl);
1893
d9031405 1894 tcg_temp_free_i64(t0);
8b3b2d75
MCA
1895 tcg_temp_free_i64(xth);
1896 tcg_temp_free_i64(xtl);
1897 tcg_temp_free_i64(xah);
1898 tcg_temp_free_i64(xal);
1899 tcg_temp_free_i64(xbh);
1900 tcg_temp_free_i64(xbl);
d9031405
ND
1901}
1902
e385e4b7
ND
1903static void gen_xviexpdp(DisasContext *ctx)
1904{
8b3b2d75
MCA
1905 TCGv_i64 xth;
1906 TCGv_i64 xtl;
1907 TCGv_i64 xah;
1908 TCGv_i64 xal;
1909 TCGv_i64 xbh;
1910 TCGv_i64 xbl;
e385e4b7
ND
1911
1912 if (unlikely(!ctx->vsx_enabled)) {
1913 gen_exception(ctx, POWERPC_EXCP_VSXU);
1914 return;
1915 }
8b3b2d75
MCA
1916 xth = tcg_temp_new_i64();
1917 xtl = tcg_temp_new_i64();
1918 xah = tcg_temp_new_i64();
1919 xal = tcg_temp_new_i64();
1920 xbh = tcg_temp_new_i64();
1921 xbl = tcg_temp_new_i64();
1922 get_cpu_vsrh(xah, xA(ctx->opcode));
1923 get_cpu_vsrl(xal, xA(ctx->opcode));
1924 get_cpu_vsrh(xbh, xB(ctx->opcode));
1925 get_cpu_vsrl(xbl, xB(ctx->opcode));
8b3b2d75 1926
cde0a41c 1927 tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
8b3b2d75 1928 set_cpu_vsrh(xT(ctx->opcode), xth);
cde0a41c
PMD
1929
1930 tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
8b3b2d75
MCA
1931 set_cpu_vsrl(xT(ctx->opcode), xtl);
1932
8b3b2d75
MCA
1933 tcg_temp_free_i64(xth);
1934 tcg_temp_free_i64(xtl);
1935 tcg_temp_free_i64(xah);
1936 tcg_temp_free_i64(xal);
1937 tcg_temp_free_i64(xbh);
1938 tcg_temp_free_i64(xbl);
e385e4b7
ND
1939}
1940
08f1ee5a
ND
1941static void gen_xvxexpsp(DisasContext *ctx)
1942{
8b3b2d75
MCA
1943 TCGv_i64 xth;
1944 TCGv_i64 xtl;
1945 TCGv_i64 xbh;
1946 TCGv_i64 xbl;
08f1ee5a
ND
1947
1948 if (unlikely(!ctx->vsx_enabled)) {
1949 gen_exception(ctx, POWERPC_EXCP_VSXU);
1950 return;
1951 }
8b3b2d75
MCA
1952 xth = tcg_temp_new_i64();
1953 xtl = tcg_temp_new_i64();
1954 xbh = tcg_temp_new_i64();
1955 xbl = tcg_temp_new_i64();
1956 get_cpu_vsrh(xbh, xB(ctx->opcode));
1957 get_cpu_vsrl(xbl, xB(ctx->opcode));
1958
08f1ee5a
ND
1959 tcg_gen_shri_i64(xth, xbh, 23);
1960 tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
8b3b2d75 1961 set_cpu_vsrh(xT(ctx->opcode), xth);
08f1ee5a
ND
1962 tcg_gen_shri_i64(xtl, xbl, 23);
1963 tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
8b3b2d75
MCA
1964 set_cpu_vsrl(xT(ctx->opcode), xtl);
1965
1966 tcg_temp_free_i64(xth);
1967 tcg_temp_free_i64(xtl);
1968 tcg_temp_free_i64(xbh);
1969 tcg_temp_free_i64(xbl);
08f1ee5a
ND
1970}
1971
46804e28
ND
1972static void gen_xvxexpdp(DisasContext *ctx)
1973{
8b3b2d75
MCA
1974 TCGv_i64 xth;
1975 TCGv_i64 xtl;
1976 TCGv_i64 xbh;
1977 TCGv_i64 xbl;
46804e28
ND
1978
1979 if (unlikely(!ctx->vsx_enabled)) {
1980 gen_exception(ctx, POWERPC_EXCP_VSXU);
1981 return;
1982 }
8b3b2d75
MCA
1983 xth = tcg_temp_new_i64();
1984 xtl = tcg_temp_new_i64();
1985 xbh = tcg_temp_new_i64();
1986 xbl = tcg_temp_new_i64();
1987 get_cpu_vsrh(xbh, xB(ctx->opcode));
1988 get_cpu_vsrl(xbl, xB(ctx->opcode));
1989
e2622073 1990 tcg_gen_extract_i64(xth, xbh, 52, 11);
8b3b2d75 1991 set_cpu_vsrh(xT(ctx->opcode), xth);
e2622073 1992 tcg_gen_extract_i64(xtl, xbl, 52, 11);
8b3b2d75
MCA
1993 set_cpu_vsrl(xT(ctx->opcode), xtl);
1994
1995 tcg_temp_free_i64(xth);
1996 tcg_temp_free_i64(xtl);
1997 tcg_temp_free_i64(xbh);
1998 tcg_temp_free_i64(xbl);
46804e28
ND
1999}
2000
75cf84cb 2001GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
c5969d2e 2002
cf9465a1
ND
2003static void gen_xvxsigdp(DisasContext *ctx)
2004{
8b3b2d75
MCA
2005 TCGv_i64 xth;
2006 TCGv_i64 xtl;
2007 TCGv_i64 xbh;
2008 TCGv_i64 xbl;
cf9465a1
ND
2009 TCGv_i64 t0, zr, nan, exp;
2010
2011 if (unlikely(!ctx->vsx_enabled)) {
2012 gen_exception(ctx, POWERPC_EXCP_VSXU);
2013 return;
2014 }
8b3b2d75
MCA
2015 xth = tcg_temp_new_i64();
2016 xtl = tcg_temp_new_i64();
2017 xbh = tcg_temp_new_i64();
2018 xbl = tcg_temp_new_i64();
2019 get_cpu_vsrh(xbh, xB(ctx->opcode));
2020 get_cpu_vsrl(xbl, xB(ctx->opcode));
cf9465a1
ND
2021 exp = tcg_temp_new_i64();
2022 t0 = tcg_temp_new_i64();
2023 zr = tcg_const_i64(0);
2024 nan = tcg_const_i64(2047);
2025
e2622073 2026 tcg_gen_extract_i64(exp, xbh, 52, 11);
cf9465a1
ND
2027 tcg_gen_movi_i64(t0, 0x0010000000000000);
2028 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2029 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
dd977e4f 2030 tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
8b3b2d75 2031 set_cpu_vsrh(xT(ctx->opcode), xth);
cf9465a1 2032
e2622073 2033 tcg_gen_extract_i64(exp, xbl, 52, 11);
cf9465a1
ND
2034 tcg_gen_movi_i64(t0, 0x0010000000000000);
2035 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2036 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
cf4e9363 2037 tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
8b3b2d75 2038 set_cpu_vsrl(xT(ctx->opcode), xtl);
cf9465a1
ND
2039
2040 tcg_temp_free_i64(t0);
2041 tcg_temp_free_i64(exp);
2042 tcg_temp_free_i64(zr);
2043 tcg_temp_free_i64(nan);
8b3b2d75
MCA
2044 tcg_temp_free_i64(xth);
2045 tcg_temp_free_i64(xtl);
2046 tcg_temp_free_i64(xbh);
2047 tcg_temp_free_i64(xbl);
cf9465a1
ND
2048}
2049
3014427a
BH
2050#undef GEN_XX2FORM
2051#undef GEN_XX3FORM
2052#undef GEN_XX2IFORM
2053#undef GEN_XX3_RC_FORM
2054#undef GEN_XX3FORM_DM
2055#undef VSX_LOGICAL