]> git.proxmox.com Git - mirror_qemu.git/blame - tcg/tcg-op.c
Merge tag 'migration-20231020-pull-request' of https://gitlab.com/juan.quintela/qemu...
[mirror_qemu.git] / tcg / tcg-op.c
CommitLineData
951c6300
RH
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
757e725b 25#include "qemu/osdep.h"
dcb32f1d 26#include "tcg/tcg.h"
47f7313d 27#include "tcg/tcg-temp-internal.h"
ad3d0e4d 28#include "tcg/tcg-op-common.h"
cac9b0fd 29#include "exec/translation-block.h"
e6d86bed 30#include "exec/plugin-gen.h"
d56fea79 31#include "tcg-internal.h"
951c6300
RH
32
33
b7e8b17a 34void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
951c6300 35{
d4478943 36 TCGOp *op = tcg_emit_op(opc, 1);
75e8b9b7 37 op->args[0] = a1;
951c6300
RH
38}
39
b7e8b17a 40void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
951c6300 41{
d4478943 42 TCGOp *op = tcg_emit_op(opc, 2);
75e8b9b7
RH
43 op->args[0] = a1;
44 op->args[1] = a2;
951c6300
RH
45}
46
b7e8b17a 47void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
951c6300 48{
d4478943 49 TCGOp *op = tcg_emit_op(opc, 3);
75e8b9b7
RH
50 op->args[0] = a1;
51 op->args[1] = a2;
52 op->args[2] = a3;
951c6300
RH
53}
54
b7e8b17a 55void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
951c6300 56{
d4478943 57 TCGOp *op = tcg_emit_op(opc, 4);
75e8b9b7
RH
58 op->args[0] = a1;
59 op->args[1] = a2;
60 op->args[2] = a3;
61 op->args[3] = a4;
951c6300
RH
62}
63
b7e8b17a
RH
64void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
65 TCGArg a4, TCGArg a5)
951c6300 66{
d4478943 67 TCGOp *op = tcg_emit_op(opc, 5);
75e8b9b7
RH
68 op->args[0] = a1;
69 op->args[1] = a2;
70 op->args[2] = a3;
71 op->args[3] = a4;
72 op->args[4] = a5;
951c6300
RH
73}
74
b7e8b17a
RH
75void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
76 TCGArg a4, TCGArg a5, TCGArg a6)
951c6300 77{
d4478943 78 TCGOp *op = tcg_emit_op(opc, 6);
75e8b9b7
RH
79 op->args[0] = a1;
80 op->args[1] = a2;
81 op->args[2] = a3;
82 op->args[3] = a4;
83 op->args[4] = a5;
84 op->args[5] = a6;
951c6300
RH
85}
86
f85b1fc4
RH
87/* Generic ops. */
88
89static void add_last_as_label_use(TCGLabel *l)
90{
91 TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));
92
93 u->op = tcg_last_op();
94 QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
95}
96
97void tcg_gen_br(TCGLabel *l)
98{
99 tcg_gen_op1(INDEX_op_br, label_arg(l));
100 add_last_as_label_use(l);
101}
102
f65e19bc
PK
103void tcg_gen_mb(TCGBar mb_type)
104{
c914d46d
RH
105#ifdef CONFIG_USER_ONLY
106 bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
107#else
108 /*
109 * It is tempting to elide the barrier in a uniprocessor context.
110 * However, even with a single cpu we have i/o threads running in
111 * parallel, and lack of memory order can result in e.g. virtio
112 * queue entries being read incorrectly.
113 */
114 bool parallel = true;
115#endif
116
117 if (parallel) {
b7e8b17a 118 tcg_gen_op1(INDEX_op_mb, mb_type);
f65e19bc
PK
119 }
120}
121
951c6300
RH
122/* 32 bit ops */
123
11d11d61
RH
124void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
125{
126 tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
127}
128
951c6300
RH
129void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
130{
131 /* some cases can be optimized here */
132 if (arg2 == 0) {
133 tcg_gen_mov_i32(ret, arg1);
134 } else {
11d11d61 135 tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
136 }
137}
138
139void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
140{
141 if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
142 /* Don't recurse with tcg_gen_neg_i32. */
143 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
144 } else {
11d11d61 145 tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
951c6300
RH
146 }
147}
148
149void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
150{
151 /* some cases can be optimized here */
152 if (arg2 == 0) {
153 tcg_gen_mov_i32(ret, arg1);
154 } else {
11d11d61 155 tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
156 }
157}
158
474b2e8f 159void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 160{
951c6300
RH
161 /* Some cases can be optimized here. */
162 switch (arg2) {
163 case 0:
164 tcg_gen_movi_i32(ret, 0);
165 return;
474b2e8f 166 case -1:
951c6300
RH
167 tcg_gen_mov_i32(ret, arg1);
168 return;
474b2e8f 169 case 0xff:
951c6300
RH
170 /* Don't recurse with tcg_gen_ext8u_i32. */
171 if (TCG_TARGET_HAS_ext8u_i32) {
172 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
173 return;
174 }
175 break;
474b2e8f 176 case 0xffff:
951c6300
RH
177 if (TCG_TARGET_HAS_ext16u_i32) {
178 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
179 return;
180 }
181 break;
182 }
11d11d61
RH
183
184 tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
185}
186
187void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
188{
189 /* Some cases can be optimized here. */
190 if (arg2 == -1) {
191 tcg_gen_movi_i32(ret, -1);
192 } else if (arg2 == 0) {
193 tcg_gen_mov_i32(ret, arg1);
194 } else {
11d11d61 195 tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
196 }
197}
198
199void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
200{
201 /* Some cases can be optimized here. */
202 if (arg2 == 0) {
203 tcg_gen_mov_i32(ret, arg1);
204 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
205 /* Don't recurse with tcg_gen_not_i32. */
206 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
207 } else {
11d11d61 208 tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
209 }
210}
211
474b2e8f 212void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 213{
474b2e8f 214 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
215 if (arg2 == 0) {
216 tcg_gen_mov_i32(ret, arg1);
217 } else {
11d11d61 218 tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
219 }
220}
221
474b2e8f 222void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 223{
474b2e8f 224 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
225 if (arg2 == 0) {
226 tcg_gen_mov_i32(ret, arg1);
227 } else {
11d11d61 228 tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
229 }
230}
231
474b2e8f 232void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 233{
474b2e8f 234 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
235 if (arg2 == 0) {
236 tcg_gen_mov_i32(ret, arg1);
237 } else {
11d11d61 238 tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
239 }
240}
241
42a268c2 242void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
951c6300
RH
243{
244 if (cond == TCG_COND_ALWAYS) {
42a268c2 245 tcg_gen_br(l);
951c6300 246 } else if (cond != TCG_COND_NEVER) {
42a268c2 247 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
f85b1fc4 248 add_last_as_label_use(l);
951c6300
RH
249 }
250}
251
42a268c2 252void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
951c6300 253{
37ed3bf1
RH
254 if (cond == TCG_COND_ALWAYS) {
255 tcg_gen_br(l);
256 } else if (cond != TCG_COND_NEVER) {
11d11d61 257 tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
37ed3bf1 258 }
951c6300
RH
259}
260
261void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
262 TCGv_i32 arg1, TCGv_i32 arg2)
263{
264 if (cond == TCG_COND_ALWAYS) {
265 tcg_gen_movi_i32(ret, 1);
266 } else if (cond == TCG_COND_NEVER) {
267 tcg_gen_movi_i32(ret, 0);
268 } else {
269 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
270 }
271}
272
273void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
274 TCGv_i32 arg1, int32_t arg2)
275{
11d11d61 276 tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
277}
278
3635502d
RH
279void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
280 TCGv_i32 arg1, TCGv_i32 arg2)
281{
282 if (cond == TCG_COND_ALWAYS) {
283 tcg_gen_movi_i32(ret, -1);
284 } else if (cond == TCG_COND_NEVER) {
285 tcg_gen_movi_i32(ret, 0);
286 } else if (TCG_TARGET_HAS_negsetcond_i32) {
287 tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
288 } else {
289 tcg_gen_setcond_i32(cond, ret, arg1, arg2);
290 tcg_gen_neg_i32(ret, ret);
291 }
292}
293
951c6300
RH
294void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
295{
b2e3ae94
RH
296 if (arg2 == 0) {
297 tcg_gen_movi_i32(ret, 0);
298 } else if (is_power_of_2(arg2)) {
299 tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
300 } else {
11d11d61 301 tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
b2e3ae94 302 }
951c6300
RH
303}
304
305void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
306{
307 if (TCG_TARGET_HAS_div_i32) {
308 tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
309 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 310 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
311 tcg_gen_sari_i32(t0, arg1, 31);
312 tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
313 tcg_temp_free_i32(t0);
314 } else {
315 gen_helper_div_i32(ret, arg1, arg2);
316 }
317}
318
319void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
320{
321 if (TCG_TARGET_HAS_rem_i32) {
322 tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
323 } else if (TCG_TARGET_HAS_div_i32) {
5dd48602 324 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
325 tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
326 tcg_gen_mul_i32(t0, t0, arg2);
327 tcg_gen_sub_i32(ret, arg1, t0);
328 tcg_temp_free_i32(t0);
329 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 330 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
331 tcg_gen_sari_i32(t0, arg1, 31);
332 tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
333 tcg_temp_free_i32(t0);
334 } else {
335 gen_helper_rem_i32(ret, arg1, arg2);
336 }
337}
338
339void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
340{
341 if (TCG_TARGET_HAS_div_i32) {
342 tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
343 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 344 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
345 tcg_gen_movi_i32(t0, 0);
346 tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
347 tcg_temp_free_i32(t0);
348 } else {
349 gen_helper_divu_i32(ret, arg1, arg2);
350 }
351}
352
353void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
354{
355 if (TCG_TARGET_HAS_rem_i32) {
356 tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
357 } else if (TCG_TARGET_HAS_div_i32) {
5dd48602 358 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
359 tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
360 tcg_gen_mul_i32(t0, t0, arg2);
361 tcg_gen_sub_i32(ret, arg1, t0);
362 tcg_temp_free_i32(t0);
363 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 364 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
365 tcg_gen_movi_i32(t0, 0);
366 tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
367 tcg_temp_free_i32(t0);
368 } else {
369 gen_helper_remu_i32(ret, arg1, arg2);
370 }
371}
372
373void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
374{
375 if (TCG_TARGET_HAS_andc_i32) {
376 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
377 } else {
5dd48602 378 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
379 tcg_gen_not_i32(t0, arg2);
380 tcg_gen_and_i32(ret, arg1, t0);
381 tcg_temp_free_i32(t0);
382 }
383}
384
385void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
386{
387 if (TCG_TARGET_HAS_eqv_i32) {
388 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
389 } else {
390 tcg_gen_xor_i32(ret, arg1, arg2);
391 tcg_gen_not_i32(ret, ret);
392 }
393}
394
395void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
396{
397 if (TCG_TARGET_HAS_nand_i32) {
398 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
399 } else {
400 tcg_gen_and_i32(ret, arg1, arg2);
401 tcg_gen_not_i32(ret, ret);
402 }
403}
404
405void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
406{
407 if (TCG_TARGET_HAS_nor_i32) {
408 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
409 } else {
410 tcg_gen_or_i32(ret, arg1, arg2);
411 tcg_gen_not_i32(ret, ret);
412 }
413}
414
415void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
416{
417 if (TCG_TARGET_HAS_orc_i32) {
418 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
419 } else {
5dd48602 420 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
421 tcg_gen_not_i32(t0, arg2);
422 tcg_gen_or_i32(ret, arg1, t0);
423 tcg_temp_free_i32(t0);
424 }
425}
426
0e28d006
RH
427void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
428{
429 if (TCG_TARGET_HAS_clz_i32) {
430 tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
431 } else if (TCG_TARGET_HAS_clz_i64) {
5dd48602
RH
432 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
433 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
0e28d006
RH
434 tcg_gen_extu_i32_i64(t1, arg1);
435 tcg_gen_extu_i32_i64(t2, arg2);
436 tcg_gen_addi_i64(t2, t2, 32);
437 tcg_gen_clz_i64(t1, t1, t2);
438 tcg_gen_extrl_i64_i32(ret, t1);
439 tcg_temp_free_i64(t1);
440 tcg_temp_free_i64(t2);
441 tcg_gen_subi_i32(ret, ret, 32);
442 } else {
443 gen_helper_clz_i32(ret, arg1, arg2);
444 }
445}
446
447void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
448{
11d11d61 449 tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
0e28d006
RH
450}
451
452void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
453{
454 if (TCG_TARGET_HAS_ctz_i32) {
455 tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
456 } else if (TCG_TARGET_HAS_ctz_i64) {
5dd48602
RH
457 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
458 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
0e28d006
RH
459 tcg_gen_extu_i32_i64(t1, arg1);
460 tcg_gen_extu_i32_i64(t2, arg2);
461 tcg_gen_ctz_i64(t1, t1, t2);
462 tcg_gen_extrl_i64_i32(ret, t1);
463 tcg_temp_free_i64(t1);
464 tcg_temp_free_i64(t2);
14e99210
RH
465 } else if (TCG_TARGET_HAS_ctpop_i32
466 || TCG_TARGET_HAS_ctpop_i64
467 || TCG_TARGET_HAS_clz_i32
468 || TCG_TARGET_HAS_clz_i64) {
5dd48602 469 TCGv_i32 z, t = tcg_temp_ebb_new_i32();
14e99210
RH
470
471 if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
472 tcg_gen_subi_i32(t, arg1, 1);
473 tcg_gen_andc_i32(t, t, arg1);
474 tcg_gen_ctpop_i32(t, t);
475 } else {
476 /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
477 tcg_gen_neg_i32(t, arg1);
478 tcg_gen_and_i32(t, t, arg1);
479 tcg_gen_clzi_i32(t, t, 32);
480 tcg_gen_xori_i32(t, t, 31);
481 }
11d11d61 482 z = tcg_constant_i32(0);
14e99210
RH
483 tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
484 tcg_temp_free_i32(t);
0e28d006
RH
485 } else {
486 gen_helper_ctz_i32(ret, arg1, arg2);
487 }
488}
489
490void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
491{
14e99210
RH
492 if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
493 /* This equivalence has the advantage of not requiring a fixup. */
5dd48602 494 TCGv_i32 t = tcg_temp_ebb_new_i32();
14e99210
RH
495 tcg_gen_subi_i32(t, arg1, 1);
496 tcg_gen_andc_i32(t, t, arg1);
497 tcg_gen_ctpop_i32(ret, t);
498 tcg_temp_free_i32(t);
499 } else {
11d11d61 500 tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
14e99210 501 }
0e28d006
RH
502}
503
086920c2
RH
504void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
505{
506 if (TCG_TARGET_HAS_clz_i32) {
5dd48602 507 TCGv_i32 t = tcg_temp_ebb_new_i32();
086920c2
RH
508 tcg_gen_sari_i32(t, arg, 31);
509 tcg_gen_xor_i32(t, t, arg);
510 tcg_gen_clzi_i32(t, t, 32);
511 tcg_gen_subi_i32(ret, t, 1);
512 tcg_temp_free_i32(t);
513 } else {
514 gen_helper_clrsb_i32(ret, arg);
515 }
516}
517
a768e4e9
RH
518void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
519{
520 if (TCG_TARGET_HAS_ctpop_i32) {
521 tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
522 } else if (TCG_TARGET_HAS_ctpop_i64) {
5dd48602 523 TCGv_i64 t = tcg_temp_ebb_new_i64();
a768e4e9
RH
524 tcg_gen_extu_i32_i64(t, arg1);
525 tcg_gen_ctpop_i64(t, t);
526 tcg_gen_extrl_i64_i32(ret, t);
527 tcg_temp_free_i64(t);
528 } else {
529 gen_helper_ctpop_i32(ret, arg1);
530 }
531}
532
951c6300
RH
533void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
534{
535 if (TCG_TARGET_HAS_rot_i32) {
536 tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
537 } else {
538 TCGv_i32 t0, t1;
539
5dd48602
RH
540 t0 = tcg_temp_ebb_new_i32();
541 t1 = tcg_temp_ebb_new_i32();
951c6300
RH
542 tcg_gen_shl_i32(t0, arg1, arg2);
543 tcg_gen_subfi_i32(t1, 32, arg2);
544 tcg_gen_shr_i32(t1, arg1, t1);
545 tcg_gen_or_i32(ret, t0, t1);
546 tcg_temp_free_i32(t0);
547 tcg_temp_free_i32(t1);
548 }
549}
550
07dada03 551void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 552{
07dada03 553 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
554 /* some cases can be optimized here */
555 if (arg2 == 0) {
556 tcg_gen_mov_i32(ret, arg1);
557 } else if (TCG_TARGET_HAS_rot_i32) {
11d11d61 558 tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
559 } else {
560 TCGv_i32 t0, t1;
5dd48602
RH
561 t0 = tcg_temp_ebb_new_i32();
562 t1 = tcg_temp_ebb_new_i32();
951c6300
RH
563 tcg_gen_shli_i32(t0, arg1, arg2);
564 tcg_gen_shri_i32(t1, arg1, 32 - arg2);
565 tcg_gen_or_i32(ret, t0, t1);
566 tcg_temp_free_i32(t0);
567 tcg_temp_free_i32(t1);
568 }
569}
570
571void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
572{
573 if (TCG_TARGET_HAS_rot_i32) {
574 tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
575 } else {
576 TCGv_i32 t0, t1;
577
5dd48602
RH
578 t0 = tcg_temp_ebb_new_i32();
579 t1 = tcg_temp_ebb_new_i32();
951c6300
RH
580 tcg_gen_shr_i32(t0, arg1, arg2);
581 tcg_gen_subfi_i32(t1, 32, arg2);
582 tcg_gen_shl_i32(t1, arg1, t1);
583 tcg_gen_or_i32(ret, t0, t1);
584 tcg_temp_free_i32(t0);
585 tcg_temp_free_i32(t1);
586 }
587}
588
07dada03 589void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 590{
07dada03 591 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
592 /* some cases can be optimized here */
593 if (arg2 == 0) {
594 tcg_gen_mov_i32(ret, arg1);
595 } else {
596 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
597 }
598}
599
600void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
601 unsigned int ofs, unsigned int len)
602{
603 uint32_t mask;
604 TCGv_i32 t1;
605
606 tcg_debug_assert(ofs < 32);
0d0d309d 607 tcg_debug_assert(len > 0);
951c6300
RH
608 tcg_debug_assert(len <= 32);
609 tcg_debug_assert(ofs + len <= 32);
610
0d0d309d 611 if (len == 32) {
951c6300
RH
612 tcg_gen_mov_i32(ret, arg2);
613 return;
614 }
615 if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
616 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
617 return;
618 }
619
5dd48602 620 t1 = tcg_temp_ebb_new_i32();
951c6300 621
b0a60567
RH
622 if (TCG_TARGET_HAS_extract2_i32) {
623 if (ofs + len == 32) {
624 tcg_gen_shli_i32(t1, arg1, len);
625 tcg_gen_extract2_i32(ret, t1, arg2, len);
626 goto done;
627 }
628 if (ofs == 0) {
629 tcg_gen_extract2_i32(ret, arg1, arg2, len);
630 tcg_gen_rotli_i32(ret, ret, len);
631 goto done;
632 }
633 }
634
635 mask = (1u << len) - 1;
951c6300
RH
636 if (ofs + len < 32) {
637 tcg_gen_andi_i32(t1, arg2, mask);
638 tcg_gen_shli_i32(t1, t1, ofs);
639 } else {
640 tcg_gen_shli_i32(t1, arg2, ofs);
641 }
642 tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
643 tcg_gen_or_i32(ret, ret, t1);
b0a60567 644 done:
951c6300
RH
645 tcg_temp_free_i32(t1);
646}
647
07cc68d5
RH
648void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
649 unsigned int ofs, unsigned int len)
650{
651 tcg_debug_assert(ofs < 32);
652 tcg_debug_assert(len > 0);
653 tcg_debug_assert(len <= 32);
654 tcg_debug_assert(ofs + len <= 32);
655
656 if (ofs + len == 32) {
657 tcg_gen_shli_i32(ret, arg, ofs);
658 } else if (ofs == 0) {
659 tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
660 } else if (TCG_TARGET_HAS_deposit_i32
661 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
11d11d61 662 TCGv_i32 zero = tcg_constant_i32(0);
07cc68d5 663 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
07cc68d5
RH
664 } else {
665 /* To help two-operand hosts we prefer to zero-extend first,
666 which allows ARG to stay live. */
667 switch (len) {
668 case 16:
669 if (TCG_TARGET_HAS_ext16u_i32) {
670 tcg_gen_ext16u_i32(ret, arg);
671 tcg_gen_shli_i32(ret, ret, ofs);
672 return;
673 }
674 break;
675 case 8:
676 if (TCG_TARGET_HAS_ext8u_i32) {
677 tcg_gen_ext8u_i32(ret, arg);
678 tcg_gen_shli_i32(ret, ret, ofs);
679 return;
680 }
681 break;
682 }
683 /* Otherwise prefer zero-extension over AND for code size. */
684 switch (ofs + len) {
685 case 16:
686 if (TCG_TARGET_HAS_ext16u_i32) {
687 tcg_gen_shli_i32(ret, arg, ofs);
688 tcg_gen_ext16u_i32(ret, ret);
689 return;
690 }
691 break;
692 case 8:
693 if (TCG_TARGET_HAS_ext8u_i32) {
694 tcg_gen_shli_i32(ret, arg, ofs);
695 tcg_gen_ext8u_i32(ret, ret);
696 return;
697 }
698 break;
699 }
700 tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
701 tcg_gen_shli_i32(ret, ret, ofs);
702 }
703}
704
7ec8bab3
RH
705void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
706 unsigned int ofs, unsigned int len)
707{
708 tcg_debug_assert(ofs < 32);
709 tcg_debug_assert(len > 0);
710 tcg_debug_assert(len <= 32);
711 tcg_debug_assert(ofs + len <= 32);
712
713 /* Canonicalize certain special cases, even if extract is supported. */
714 if (ofs + len == 32) {
715 tcg_gen_shri_i32(ret, arg, 32 - len);
716 return;
717 }
718 if (ofs == 0) {
719 tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
720 return;
721 }
722
723 if (TCG_TARGET_HAS_extract_i32
724 && TCG_TARGET_extract_i32_valid(ofs, len)) {
725 tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
726 return;
727 }
728
729 /* Assume that zero-extension, if available, is cheaper than a shift. */
730 switch (ofs + len) {
731 case 16:
732 if (TCG_TARGET_HAS_ext16u_i32) {
733 tcg_gen_ext16u_i32(ret, arg);
734 tcg_gen_shri_i32(ret, ret, ofs);
735 return;
736 }
737 break;
738 case 8:
739 if (TCG_TARGET_HAS_ext8u_i32) {
740 tcg_gen_ext8u_i32(ret, arg);
741 tcg_gen_shri_i32(ret, ret, ofs);
742 return;
743 }
744 break;
745 }
746
747 /* ??? Ideally we'd know what values are available for immediate AND.
748 Assume that 8 bits are available, plus the special case of 16,
749 so that we get ext8u, ext16u. */
750 switch (len) {
751 case 1 ... 8: case 16:
752 tcg_gen_shri_i32(ret, arg, ofs);
753 tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
754 break;
755 default:
756 tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
757 tcg_gen_shri_i32(ret, ret, 32 - len);
758 break;
759 }
760}
761
762void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
763 unsigned int ofs, unsigned int len)
764{
765 tcg_debug_assert(ofs < 32);
766 tcg_debug_assert(len > 0);
767 tcg_debug_assert(len <= 32);
768 tcg_debug_assert(ofs + len <= 32);
769
770 /* Canonicalize certain special cases, even if extract is supported. */
771 if (ofs + len == 32) {
772 tcg_gen_sari_i32(ret, arg, 32 - len);
773 return;
774 }
775 if (ofs == 0) {
776 switch (len) {
777 case 16:
778 tcg_gen_ext16s_i32(ret, arg);
779 return;
780 case 8:
781 tcg_gen_ext8s_i32(ret, arg);
782 return;
783 }
784 }
785
786 if (TCG_TARGET_HAS_sextract_i32
787 && TCG_TARGET_extract_i32_valid(ofs, len)) {
788 tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
789 return;
790 }
791
792 /* Assume that sign-extension, if available, is cheaper than a shift. */
793 switch (ofs + len) {
794 case 16:
795 if (TCG_TARGET_HAS_ext16s_i32) {
796 tcg_gen_ext16s_i32(ret, arg);
797 tcg_gen_sari_i32(ret, ret, ofs);
798 return;
799 }
800 break;
801 case 8:
802 if (TCG_TARGET_HAS_ext8s_i32) {
803 tcg_gen_ext8s_i32(ret, arg);
804 tcg_gen_sari_i32(ret, ret, ofs);
805 return;
806 }
807 break;
808 }
809 switch (len) {
810 case 16:
811 if (TCG_TARGET_HAS_ext16s_i32) {
812 tcg_gen_shri_i32(ret, arg, ofs);
813 tcg_gen_ext16s_i32(ret, ret);
814 return;
815 }
816 break;
817 case 8:
818 if (TCG_TARGET_HAS_ext8s_i32) {
819 tcg_gen_shri_i32(ret, arg, ofs);
820 tcg_gen_ext8s_i32(ret, ret);
821 return;
822 }
823 break;
824 }
825
826 tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
827 tcg_gen_sari_i32(ret, ret, 32 - len);
828}
829
2089fcc9
DH
830/*
831 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
832 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
833 */
834void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
835 unsigned int ofs)
836{
837 tcg_debug_assert(ofs <= 32);
838 if (ofs == 0) {
839 tcg_gen_mov_i32(ret, al);
840 } else if (ofs == 32) {
841 tcg_gen_mov_i32(ret, ah);
842 } else if (al == ah) {
843 tcg_gen_rotri_i32(ret, al, ofs);
fce1296f
RH
844 } else if (TCG_TARGET_HAS_extract2_i32) {
845 tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
2089fcc9 846 } else {
5dd48602 847 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
2089fcc9
DH
848 tcg_gen_shri_i32(t0, al, ofs);
849 tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
850 tcg_temp_free_i32(t0);
851 }
852}
853
951c6300
RH
854void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
855 TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
856{
37ed3bf1
RH
857 if (cond == TCG_COND_ALWAYS) {
858 tcg_gen_mov_i32(ret, v1);
859 } else if (cond == TCG_COND_NEVER) {
860 tcg_gen_mov_i32(ret, v2);
861 } else if (TCG_TARGET_HAS_movcond_i32) {
951c6300
RH
862 tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
863 } else {
5dd48602
RH
864 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
865 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
4a883870 866 tcg_gen_negsetcond_i32(cond, t0, c1, c2);
951c6300
RH
867 tcg_gen_and_i32(t1, v1, t0);
868 tcg_gen_andc_i32(ret, v2, t0);
869 tcg_gen_or_i32(ret, ret, t1);
870 tcg_temp_free_i32(t0);
871 tcg_temp_free_i32(t1);
872 }
873}
874
875void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
876 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
877{
878 if (TCG_TARGET_HAS_add2_i32) {
879 tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
951c6300 880 } else {
5dd48602
RH
881 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
882 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
883 tcg_gen_concat_i32_i64(t0, al, ah);
884 tcg_gen_concat_i32_i64(t1, bl, bh);
885 tcg_gen_add_i64(t0, t0, t1);
886 tcg_gen_extr_i64_i32(rl, rh, t0);
887 tcg_temp_free_i64(t0);
888 tcg_temp_free_i64(t1);
889 }
890}
891
892void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
893 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
894{
895 if (TCG_TARGET_HAS_sub2_i32) {
896 tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
951c6300 897 } else {
5dd48602
RH
898 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
899 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
900 tcg_gen_concat_i32_i64(t0, al, ah);
901 tcg_gen_concat_i32_i64(t1, bl, bh);
902 tcg_gen_sub_i64(t0, t0, t1);
903 tcg_gen_extr_i64_i32(rl, rh, t0);
904 tcg_temp_free_i64(t0);
905 tcg_temp_free_i64(t1);
906 }
907}
908
909void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
910{
911 if (TCG_TARGET_HAS_mulu2_i32) {
912 tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
951c6300 913 } else if (TCG_TARGET_HAS_muluh_i32) {
5dd48602 914 TCGv_i32 t = tcg_temp_ebb_new_i32();
951c6300
RH
915 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
916 tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
917 tcg_gen_mov_i32(rl, t);
918 tcg_temp_free_i32(t);
9fd86b51 919 } else if (TCG_TARGET_REG_BITS == 64) {
5dd48602
RH
920 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
921 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
922 tcg_gen_extu_i32_i64(t0, arg1);
923 tcg_gen_extu_i32_i64(t1, arg2);
924 tcg_gen_mul_i64(t0, t0, t1);
925 tcg_gen_extr_i64_i32(rl, rh, t0);
926 tcg_temp_free_i64(t0);
927 tcg_temp_free_i64(t1);
9fd86b51
RH
928 } else {
929 qemu_build_not_reached();
951c6300
RH
930 }
931}
932
933void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
934{
935 if (TCG_TARGET_HAS_muls2_i32) {
936 tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
951c6300 937 } else if (TCG_TARGET_HAS_mulsh_i32) {
5dd48602 938 TCGv_i32 t = tcg_temp_ebb_new_i32();
951c6300
RH
939 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
940 tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
941 tcg_gen_mov_i32(rl, t);
942 tcg_temp_free_i32(t);
943 } else if (TCG_TARGET_REG_BITS == 32) {
5dd48602
RH
944 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
945 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
946 TCGv_i32 t2 = tcg_temp_ebb_new_i32();
947 TCGv_i32 t3 = tcg_temp_ebb_new_i32();
951c6300
RH
948 tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
949 /* Adjust for negative inputs. */
950 tcg_gen_sari_i32(t2, arg1, 31);
951 tcg_gen_sari_i32(t3, arg2, 31);
952 tcg_gen_and_i32(t2, t2, arg2);
953 tcg_gen_and_i32(t3, t3, arg1);
954 tcg_gen_sub_i32(rh, t1, t2);
955 tcg_gen_sub_i32(rh, rh, t3);
956 tcg_gen_mov_i32(rl, t0);
957 tcg_temp_free_i32(t0);
958 tcg_temp_free_i32(t1);
959 tcg_temp_free_i32(t2);
960 tcg_temp_free_i32(t3);
961 } else {
5dd48602
RH
962 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
963 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
964 tcg_gen_ext_i32_i64(t0, arg1);
965 tcg_gen_ext_i32_i64(t1, arg2);
966 tcg_gen_mul_i64(t0, t0, t1);
967 tcg_gen_extr_i64_i32(rl, rh, t0);
968 tcg_temp_free_i64(t0);
969 tcg_temp_free_i64(t1);
970 }
971}
972
5087abfb
RH
973void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
974{
975 if (TCG_TARGET_REG_BITS == 32) {
5dd48602
RH
976 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
977 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
978 TCGv_i32 t2 = tcg_temp_ebb_new_i32();
5087abfb
RH
979 tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
980 /* Adjust for negative input for the signed arg1. */
981 tcg_gen_sari_i32(t2, arg1, 31);
982 tcg_gen_and_i32(t2, t2, arg2);
983 tcg_gen_sub_i32(rh, t1, t2);
984 tcg_gen_mov_i32(rl, t0);
985 tcg_temp_free_i32(t0);
986 tcg_temp_free_i32(t1);
987 tcg_temp_free_i32(t2);
988 } else {
5dd48602
RH
989 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
990 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
5087abfb
RH
991 tcg_gen_ext_i32_i64(t0, arg1);
992 tcg_gen_extu_i32_i64(t1, arg2);
993 tcg_gen_mul_i64(t0, t0, t1);
994 tcg_gen_extr_i64_i32(rl, rh, t0);
995 tcg_temp_free_i64(t0);
996 tcg_temp_free_i64(t1);
997 }
998}
999
951c6300
RH
1000void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1001{
1002 if (TCG_TARGET_HAS_ext8s_i32) {
1003 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1004 } else {
1005 tcg_gen_shli_i32(ret, arg, 24);
1006 tcg_gen_sari_i32(ret, ret, 24);
1007 }
1008}
1009
1010void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1011{
1012 if (TCG_TARGET_HAS_ext16s_i32) {
1013 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1014 } else {
1015 tcg_gen_shli_i32(ret, arg, 16);
1016 tcg_gen_sari_i32(ret, ret, 16);
1017 }
1018}
1019
1020void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1021{
1022 if (TCG_TARGET_HAS_ext8u_i32) {
1023 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1024 } else {
1025 tcg_gen_andi_i32(ret, arg, 0xffu);
1026 }
1027}
1028
1029void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1030{
1031 if (TCG_TARGET_HAS_ext16u_i32) {
1032 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1033 } else {
1034 tcg_gen_andi_i32(ret, arg, 0xffffu);
1035 }
1036}
1037
4de5a76a
PMD
/*
 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
 *
 * Byte pattern: xxab -> yyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        /* Expand with shifts; flags choose how each half is extended. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);  /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);  /* t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /* t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /* t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /* t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);   /* t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);    /* ret = ..ba (OZ) */
                                        /*     = ssba (OS) */
                                        /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1080
92964556
PMD
/*
 * bswap32_i32: 32-bit byte swap on a 32-bit value.
 *
 * Byte pattern: abcd -> dcba
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        /* Fallback: swap adjacent bytes with a mask, then swap halves. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1110
b8976aa5
PMD
/*
 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
 *
 * Byte pattern: abcd -> cdab
 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1121
b87fb8cd
RH
/* Signed minimum: ret = (a < b) ? a : b. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

/* Unsigned minimum: ret = (a <u b) ? a : b. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

/* Signed maximum: ret = (a < b) ? b : a. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

/* Unsigned maximum: ret = (a <u b) ? b : a. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1141
ff1f11f7
RH
/*
 * Absolute value without branches: t = a >> 31 (all ones if negative),
 * then ret = (a ^ t) - t, i.e. conditional two's-complement negation.
 */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
1151
951c6300
RH
1152/* 64-bit ops */
1153
1154#if TCG_TARGET_REG_BITS == 32
1155/* These are all inline for TCG_TARGET_REG_BITS == 64. */
1156
/* Discard both 32-bit halves of a 64-bit temp (32-bit host). */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
1162
/* Copy a 64-bit value, half by half on a 32-bit host. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}
1175
/* Load a 64-bit immediate as two 32-bit immediates (32-bit host). */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
1181
/*
 * 64-bit loads on a 32-bit host: load into the low half, then either
 * zero (unsigned) or sign-extend (signed) into the high half.
 */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Full 64-bit load: two 32-bit loads in host byte order. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
1230
d56fea79
RH
/*
 * Narrow 64-bit stores on a 32-bit host: only the low half matters.
 */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

/* Full 64-bit store: two 32-bit stores in host byte order. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
1256
d56fea79
RH
/* 64-bit add/sub on a 32-bit host via carry-propagating double ops. */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1268
951c6300
RH
/* Bitwise ops on a 32-bit host operate on each half independently. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1286
/* Variable-count 64-bit shifts on a 32-bit host go through helpers. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
1301
/*
 * 64-bit multiply on a 32-bit host: low x low full product, plus the
 * two cross products folded into the high half.  high x high would
 * only affect bits >= 64 and is dropped.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    /* Write via a temp so that ret may alias arg1/arg2. */
    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
11d11d61
RH
1322
1323#else
1324
/* 64-bit host: an immediate is just a move of a constant temp. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}
1329
951c6300
RH
1330#endif /* TCG_TARGET_REG_SIZE == 32 */
1331
/* ret = arg1 + constant, with the no-op case elided. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

/* ret = constant - arg2; arg1 == 0 becomes a plain negate. */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

/* ret = arg1 - constant, with the no-op case elided. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1373
/*
 * ret = arg1 & constant.  Recognizes masks that map to cheaper ops:
 * 0 -> movi, -1 -> mov, 0xff/0xffff/0xffffffff -> zero-extensions.
 */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1413
/* ret = arg1 | constant; -1 and 0 collapse to movi/mov. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1430
/* ret = arg1 ^ constant; 0 is a mov, -1 becomes a not. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1448
951c6300
RH
/*
 * Common expansion for 64-bit shift-by-constant on a 32-bit host.
 * c is the shift count (0..63); right selects right vs left shift;
 * arith selects arithmetic vs logical for right shifts.
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half comes entirely from the other. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* Bits shifted out of the high half flow into the low half. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* Left shift: bits from the low half flow into the high half. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1498
/* 64-bit shift-by-constant wrappers; count must be in [0, 63]. */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
951c6300 1534
/*
 * Conditional branch to label l.  ALWAYS becomes an unconditional
 * branch, NEVER emits nothing; otherwise emit brcond (or brcond2 on
 * a 32-bit host) and record the label use.
 */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        add_last_as_label_use(l);
    }
}
1551
/* Conditional branch against a 64-bit immediate. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
1567
/*
 * ret = (arg1 cond arg2) ? 1 : 0.  Degenerate conditions become
 * immediates; on a 32-bit host the high half of ret is zeroed.
 */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1586
/* setcond against a 64-bit immediate. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1604
3635502d
RH
/* ret = (arg1 cond arg2) ? -1 : 0, i.e. setcond then negate. */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Negate the 0/1 low word, then copy it into the high word. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
1625
951c6300
RH
/* ret = arg1 * constant; powers of two are strength-reduced to shifts. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1636
/*
 * 64-bit division/remainder.  Preference order: native div/rem op,
 * the double-word div2/divu2 op (dividend extended into a second
 * temp), or a runtime helper.
 */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        /* Sign-extend the dividend into the high word for div2. */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        /* Zero-extend the dividend into the high word for divu2. */
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1704
/*
 * 64-bit sign/zero extensions from 8/16/32 bits.  On a 32-bit host
 * operate on the low word and fill the high word; otherwise use the
 * dedicated op or a shift/mask fallback.
 */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1779
8b078800
PMD
/*
 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low word; the high word depends only on flags. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);  /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);    /* ret = ......ba (OZ) */
                                        /*       ssssssba (OS) */
                                        /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1829
9c406215
PMD
/*
 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxabcd -> yyyydcba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        /* Mask-and-shift fallback; output extension folded into the
           final halfword swap. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1878
95180e75
PMD
/*
 * bswap64_i64: 64-bit byte swap on a 64-bit value.
 *
 * Byte pattern: abcdefgh -> hgfedcba
 */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* bswap each 32-bit half, then exchange the halves. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /* Three mask-and-shift rounds: bytes, halfwords, words. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
1928
b8976aa5
PMD
/*
 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
 * See also include/qemu/bitops.h, hswap64.
 *
 * Byte pattern: abcdefgh -> ghefcdab
 */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

    /* Rotate the words, then swap halfwords within each word. */
                                        /* arg = abcdefgh */
    tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
    tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
    tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
    tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
    tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
1952
ad262fb5
PMD
/*
 * wswap_i64: Swap 32-bit words within a 64-bit value.
 *
 * Byte pattern: abcdefgh -> efghabcd
 */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}
1963
951c6300
RH
1964void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1965{
3a13c3f3
RH
1966 if (TCG_TARGET_REG_BITS == 32) {
1967 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1968 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1969 } else if (TCG_TARGET_HAS_not_i64) {
951c6300
RH
1970 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1971 } else {
1972 tcg_gen_xori_i64(ret, arg, -1);
1973 }
951c6300
RH
1974}
1975
1976void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1977{
3a13c3f3
RH
1978 if (TCG_TARGET_REG_BITS == 32) {
1979 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1980 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1981 } else if (TCG_TARGET_HAS_andc_i64) {
951c6300
RH
1982 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1983 } else {
5dd48602 1984 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
1985 tcg_gen_not_i64(t0, arg2);
1986 tcg_gen_and_i64(ret, arg1, t0);
1987 tcg_temp_free_i64(t0);
1988 }
951c6300
RH
1989}
1990
1991void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1992{
3a13c3f3
RH
1993 if (TCG_TARGET_REG_BITS == 32) {
1994 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1995 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1996 } else if (TCG_TARGET_HAS_eqv_i64) {
951c6300
RH
1997 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1998 } else {
1999 tcg_gen_xor_i64(ret, arg1, arg2);
2000 tcg_gen_not_i64(ret, ret);
2001 }
951c6300
RH
2002}
2003
2004void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2005{
3a13c3f3
RH
2006 if (TCG_TARGET_REG_BITS == 32) {
2007 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2008 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2009 } else if (TCG_TARGET_HAS_nand_i64) {
951c6300
RH
2010 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2011 } else {
2012 tcg_gen_and_i64(ret, arg1, arg2);
2013 tcg_gen_not_i64(ret, ret);
2014 }
951c6300
RH
2015}
2016
2017void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2018{
3a13c3f3
RH
2019 if (TCG_TARGET_REG_BITS == 32) {
2020 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2021 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2022 } else if (TCG_TARGET_HAS_nor_i64) {
951c6300
RH
2023 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2024 } else {
2025 tcg_gen_or_i64(ret, arg1, arg2);
2026 tcg_gen_not_i64(ret, ret);
2027 }
951c6300
RH
2028}
2029
2030void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2031{
3a13c3f3
RH
2032 if (TCG_TARGET_REG_BITS == 32) {
2033 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2034 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2035 } else if (TCG_TARGET_HAS_orc_i64) {
951c6300
RH
2036 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2037 } else {
5dd48602 2038 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2039 tcg_gen_not_i64(t0, arg2);
2040 tcg_gen_or_i64(ret, arg1, t0);
2041 tcg_temp_free_i64(t0);
2042 }
951c6300
RH
2043}
2044
0e28d006
RH
2045void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2046{
2047 if (TCG_TARGET_HAS_clz_i64) {
2048 tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2049 } else {
2050 gen_helper_clz_i64(ret, arg1, arg2);
2051 }
2052}
2053
/* Count leading zeros with an immediate zero-input value arg2. */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /*
         * Compute clz of the low word first, with default arg2 - 32;
         * the following addi of 32 restores arg2 (mod 2^32, which is
         * harmless since the i32 result is itself 32 bits).  That sum
         * then serves as the zero-input default for clz of the high
         * word: if the high word is nonzero its clz wins, otherwise
         * we fall back to 32 + clz(low) bounded by arg2.
         */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2069
/*
 * Count trailing zeros: ret = ctz(arg1), with arg2 returned when
 * arg1 is zero.
 */
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            /*
             * (arg1 - 1) & ~arg1 turns the trailing zeros into ones
             * and clears everything else, so popcount of that mask
             * equals ctz(arg1).
             */
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
            /* Isolate the lowest set bit, then ctz(x) == 63 - clz(bit). */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        /* Select arg2 when the input was zero.  Freeing a constant
           temporary is a no-op, so the final free of z is safe.  */
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}
2096
/* Count trailing zeros with an immediate zero-input value arg2. */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /*
         * Mirror of the clzi_i64 decomposition: ctz of the high word
         * with default arg2 - 32, plus 32, becomes the zero-input
         * default for ctz of the low word.
         */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup. */
        /* popcount((x - 1) & ~x) == ctz(x), and yields 64 for x == 0. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2121
086920c2
RH
/*
 * Count leading redundant sign bits: the number of bits following
 * the sign bit that are identical to it.
 */
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        /*
         * Xor with the broadcast sign bit maps sign-copies to zeros,
         * so clz counts sign bit + copies; subtract 1 to exclude the
         * sign bit itself.  The xor result is 0 only for 0 or -1,
         * for which clzi's zero-input value of 64 gives clrsb == 63.
         */
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
2135
a768e4e9
RH
/* Population count: ret = number of set bits in arg1. */
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        /*
         * Sum the popcounts of the two halves.  The high half of ret
         * is computed first so that ret may alias arg1 safely.
         */
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
2149
951c6300
RH
/* Rotate left by a variable amount: ret = arg1 rol (arg2 & 63). */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        /* Compose from shifts: (x << n) | (x >> (64 - n)). */
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2166
/* Rotate left by a constant amount in [0, 64). */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        /* Compose from shifts; arg2 != 0 here, so 64 - arg2 < 64. */
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2186
/* Rotate right by a variable amount: ret = arg1 ror (arg2 & 63). */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        /* Compose from shifts: (x >> n) | (x << (64 - n)). */
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2203
/* Rotate right by a constant amount in [0, 64). */
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        /* A right rotate is a left rotate by the complementary count. */
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
2214
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving the remaining bits of ARG1 unchanged.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* A full-width deposit is just a move. */
    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* When the field fits entirely in one 32-bit half,
           deposit into that half and copy the other. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* Fields touching either end of the word can be formed
           with a single double-word extract (plus a shift/rotate). */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic mask-and-merge expansion. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The field reaches the top bit; the shift discards the rest. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2277
07cc68d5
RH
/*
 * Deposit the low LEN bits of ARG at bit offset OFS into an
 * all-zero background; equivalent to deposit with arg1 == 0.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches the top: a left shift does it all. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at the bottom: just mask. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Field confined to one 32-bit half: deposit there,
               zero the other half. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2361
7ec8bab3
RH
/* Extract LEN bits from ARG at bit offset OFS, zero-extended. */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Position the field at the MSB, then shift down unsigned. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2443
/* Extract LEN bits from ARG at bit offset OFS, sign-extended. */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            /* Field covers all of the low word plus part of the high. */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Last resort: position the field at the MSB, then shift down signed. */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2559
2089fcc9
DH
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        /* Both halves equal: the extract degenerates to a rotate. */
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        /* Shift the low part down, then deposit the low bits of ah
           into the vacated high bits. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
2583
951c6300
RH
/* Conditional move: ret = (c1 cond c2) ? v1 : v2. */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        /* Evaluate the 64-bit comparison via setcond2 into t0. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on t0 != 0. */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn t0 into an all-ones/all-zeros mask and blend:
               ret = (v1 & mask) | (v2 & ~mask), per half. */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Blend with a -1/0 mask from negsetcond. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2630
2631void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2632 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2633{
2634 if (TCG_TARGET_HAS_add2_i64) {
2635 tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
951c6300 2636 } else {
5dd48602
RH
2637 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2638 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
2639 tcg_gen_add_i64(t0, al, bl);
2640 tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2641 tcg_gen_add_i64(rh, ah, bh);
2642 tcg_gen_add_i64(rh, rh, t1);
2643 tcg_gen_mov_i64(rl, t0);
2644 tcg_temp_free_i64(t0);
2645 tcg_temp_free_i64(t1);
2646 }
2647}
2648
2649void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2650 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2651{
2652 if (TCG_TARGET_HAS_sub2_i64) {
2653 tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
951c6300 2654 } else {
5dd48602
RH
2655 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2656 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
2657 tcg_gen_sub_i64(t0, al, bl);
2658 tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2659 tcg_gen_sub_i64(rh, ah, bh);
2660 tcg_gen_sub_i64(rh, rh, t1);
2661 tcg_gen_mov_i64(rl, t0);
2662 tcg_temp_free_i64(t0);
2663 tcg_temp_free_i64(t1);
2664 }
2665}
2666
/* Unsigned widening multiply: rh:rl = arg1 * arg2 (128-bit product). */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Compose from separate low and high multiplies; buffer the
           low result so that rl may alias an input. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High part via the runtime helper. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2685
/* Signed widening multiply: rh:rl = arg1 * arg2 (128-bit product). */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        /* For each negative operand, the unsigned high part is too
           large by the other operand; subtract it out. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2722
5087abfb
RH
/*
 * Signed-by-unsigned widening multiply: rh:rl = arg1 (signed) *
 * arg2 (unsigned), as a 128-bit product.
 */
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    /* If arg1 < 0, the unsigned high part is too large by arg2. */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
2738
b87fb8cd
RH
/* Signed minimum: ret = min(a, b). */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
2743
/* Unsigned minimum: ret = min(a, b). */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
2748
/* Signed maximum: ret = max(a, b). */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
2753
/* Unsigned maximum: ret = max(a, b). */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
2758
ff1f11f7
RH
2759void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2760{
5dd48602 2761 TCGv_i64 t = tcg_temp_ebb_new_i64();
ff1f11f7
RH
2762
2763 tcg_gen_sari_i64(t, a, 63);
2764 tcg_gen_xor_i64(ret, a, t);
2765 tcg_gen_sub_i64(ret, ret, t);
2766 tcg_temp_free_i64(t);
2767}
2768
951c6300
RH
2769/* Size changing operations. */
2770
/* Extract the low 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* On 64-bit hosts without the op, i32 and i64 temps share
           representation; reuse the register via a plain move. */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
2782
/* Extract the high 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Shift the high half down, then reuse the register as i32. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
2797
2798void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2799{
3a13c3f3
RH
2800 if (TCG_TARGET_REG_BITS == 32) {
2801 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2802 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2803 } else {
b7e8b17a 2804 tcg_gen_op2(INDEX_op_extu_i32_i64,
ae8b75dc 2805 tcgv_i64_arg(ret), tcgv_i32_arg(arg));
3a13c3f3 2806 }
951c6300
RH
2807}
2808
2809void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2810{
3a13c3f3
RH
2811 if (TCG_TARGET_REG_BITS == 32) {
2812 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2813 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2814 } else {
b7e8b17a 2815 tcg_gen_op2(INDEX_op_ext_i32_i64,
ae8b75dc 2816 tcgv_i64_arg(ret), tcgv_i32_arg(arg));
3a13c3f3 2817 }
951c6300
RH
2818}
2819
/* Build a 64-bit value from two 32-bit halves: dest = high:low. */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        /* Shift the high part into place and or; no masking is
           needed because both halves were zero-extended. */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
2845
2846void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2847{
3a13c3f3
RH
2848 if (TCG_TARGET_REG_BITS == 32) {
2849 tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2850 tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2851 } else {
609ad705
RH
2852 tcg_gen_extrl_i64_i32(lo, arg);
2853 tcg_gen_extrh_i64_i32(hi, arg);
3a13c3f3 2854 }
951c6300
RH
2855}
2856
/* Split a 64-bit value into two 64-bit temps holding its halves. */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
2862
4771e71c
RH
/* Split a 128-bit value into its low and high 64-bit halves. */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
2868
/* Build a 128-bit value from two 64-bit halves: ret = hi:lo. */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
2874
2875void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
2876{
2877 if (dst != src) {
2878 tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
2879 tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
2880 }
2881}
2882
951c6300
RH
2883/* QEMU specific operations. */
2884
/*
 * Emit an exit from the current TB, returning (tb | idx) to the
 * main loop.  tb may be NULL (plain exit with idx == 0); idx is
 * either a chained-exit slot or TB_EXIT_REQUESTED.
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
2915
951c6300
RH
/* Emit a direct chained jump to exit slot idx of the current TB. */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    /* Control leaves the TB here; plugin mem helpers must not
       remain enabled across the jump. */
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
2930
/*
 * Look up the next TB for the current cpu state via helper and jump
 * to it directly, or fall back to a plain exit when indirect
 * chaining is disabled for this TB.
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    /* The helper returns the target code pointer, or the epilogue
       address when no TB matches. */
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}