]> git.proxmox.com Git - mirror_qemu.git/blob - tcg/tcg-op.c
tcg: Unexport tcg_gen_op*_{i32,i64}
[mirror_qemu.git] / tcg / tcg-op.c
1 /*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
25 #include "qemu/osdep.h"
26 #include "tcg/tcg.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32
33
34 /*
35 * Encourage the compiler to tail-call to a function, rather than inlining.
36 * Minimizes code size across 99 bottles of beer on the wall.
37 */
38 #define NI __attribute__((noinline))
39
/* Emit an op with 1 raw TCGArg operand into the current op stream. */
void NI tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

/* Emit an op with 2 raw TCGArg operands. */
void NI tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

/* Emit an op with 3 raw TCGArg operands. */
void NI tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

/* Emit an op with 4 raw TCGArg operands. */
void NI tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

/* Emit an op with 5 raw TCGArg operands. */
void NI tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

/* Emit an op with 6 raw TCGArg operands. */
void NI tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}
92
93 /*
94 * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
95 * assertion check. Force tail calls to avoid too much code expansion.
96 */
97 #ifdef CONFIG_DEBUG_TCG
98 # define DNI NI
99 #else
100 # define DNI
101 #endif
102
/*
 * Typed wrappers around tcg_gen_op[123]: convert TCGv_i32/TCGv_i64/TCGv_ptr
 * temporaries to raw TCGArg via tcgv_*_arg before emission.  The trailing
 * 'i' in a name marks an operand that is already a raw TCGArg immediate.
 */
static void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 a1)
{
    tcg_gen_op1(opc, tcgv_i32_arg(a1));
}

static void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 a1)
{
    tcg_gen_op1(opc, tcgv_i64_arg(a1));
}

static void DNI tcg_gen_op1i(TCGOpcode opc, TCGArg a1)
{
    tcg_gen_op1(opc, a1);
}

static void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
{
    tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
}

static void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
{
    tcg_gen_op2(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
}

static void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1,
                                TCGv_i32 a2, TCGv_i32 a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), tcgv_i32_arg(a3));
}

static void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1,
                                TCGv_i64 a2, TCGv_i64 a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), tcgv_i64_arg(a3));
}

static void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1,
                                 TCGv_i32 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
}

static void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1,
                                 TCGv_i64 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
}

/* Load/store ops: (value, base pointer, byte offset). */
static void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                    TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_ptr_arg(base), offset);
}

static void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                    TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_ptr_arg(base), offset);
}
163
/*
 * Typed wrappers around tcg_gen_op[456], same naming scheme as above:
 * each 'i' suffix letter marks one trailing raw TCGArg immediate operand.
 */
static void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4));
}

static void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4));
}

static void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                 TCGv_i32 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4);
}

static void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                 TCGv_i64 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4);
}

static void DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
}

static void DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                  TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
}

static void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
}

static void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
}

static void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGv_i32 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4, a5);
}

static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                  TCGv_i64 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4, a5);
}

static void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4,
                                TCGv_i32 a5, TCGv_i32 a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
                tcgv_i32_arg(a6));
}

static void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4,
                                TCGv_i64 a5, TCGv_i64 a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
                tcgv_i64_arg(a6));
}

static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                 TCGv_i32 a3, TCGv_i32 a4,
                                 TCGv_i32 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
}

static void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                 TCGv_i64 a3, TCGv_i64 a4,
                                 TCGv_i64 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
}

static void DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGv_i32 a3, TCGv_i32 a4,
                                  TCGArg a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
}
273
274 /* Generic ops. */
275
/* Bind label L at the current position in the op stream. */
void gen_set_label(TCGLabel *l)
{
    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, label_arg(l));
}

/*
 * Record the most recently emitted op as a branch that references
 * label L, so the label's use list stays complete.
 */
static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

/* Emit an unconditional branch to label L. */
void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
295
/*
 * Emit a memory barrier of the given type, but only when the context
 * can actually observe memory ordering.
 */
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    /* User-only: barriers matter only when the TB runs in parallel mode. */
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
314
/* Emit a marker opening a TCG-plugin callback region. */
void tcg_gen_plugin_cb_start(unsigned from, unsigned type, unsigned wr)
{
    tcg_gen_op3(INDEX_op_plugin_cb_start, from, type, wr);
}

/* Emit the matching marker closing a TCG-plugin callback region. */
void tcg_gen_plugin_cb_end(void)
{
    tcg_emit_op(INDEX_op_plugin_cb_end, 0);
}
324
325 /* 32 bit ops */
326
/* Mark the value of ARG as dead (no longer needed). */
void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

/* ret = arg; elides the op when source and destination are the same temp. */
void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (ret != arg) {
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
    }
}

/* ret = arg (32-bit immediate). */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

/* ret = arg1 + arg2. */
void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

/* ret = arg1 + arg2 (immediate); adding 0 degrades to a move. */
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
358
/* ret = arg1 - arg2. */
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

/* ret = arg1 (immediate) - arg2; 0 - x becomes negate when available. */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

/* ret = arg1 - arg2 (immediate); subtracting 0 degrades to a move. */
void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

/* ret = -arg; falls back to 0 - arg when the host lacks neg. */
void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_neg_i32) {
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
    } else {
        tcg_gen_subfi_i32(ret, 0, arg);
    }
}
392
/* ret = arg1 & arg2. */
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}

/*
 * ret = arg1 & arg2 (immediate).  Strength-reduce the common masks:
 * 0 -> movi 0, -1 -> mov, 0xff/0xffff -> zero-extension when supported.
 */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
425
/* ret = arg1 | arg2. */
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}

/* ret = arg1 | arg2 (immediate); or with -1 / 0 degrades to movi / mov. */
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

/* ret = arg1 ^ arg2. */
void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}

/* ret = arg1 ^ arg2 (immediate); xor with 0 is a move, with -1 a not. */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

/* ret = ~arg; falls back to xor with -1 when the host lacks not. */
void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}
469
/* ret = arg1 << arg2. */
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

/* ret = arg1 << arg2 (immediate, must be in [0, 31]). */
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

/* ret = arg1 >> arg2 (logical). */
void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

/* ret = arg1 >> arg2 (logical, immediate in [0, 31]). */
void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

/* ret = arg1 >> arg2 (arithmetic). */
void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

/* ret = arg1 >> arg2 (arithmetic, immediate in [0, 31]). */
void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
514
/*
 * Branch to L if (arg1 cond arg2).  ALWAYS becomes an unconditional
 * branch; NEVER emits nothing.
 */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

/* Branch to L if (arg1 cond arg2), with arg2 an immediate. */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

/* ret = (arg1 cond arg2) ? 1 : 0. */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

/* ret = (arg1 cond arg2) ? 1 : 0, with arg2 an immediate. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

/*
 * ret = (arg1 cond arg2) ? -1 : 0.  Uses the native negsetcond op when
 * available, otherwise setcond followed by negation.
 */
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}

/* ret = (arg1 cond arg2) ? -1 : 0, with arg2 an immediate. */
void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
572
/* ret = arg1 * arg2. */
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

/*
 * ret = arg1 * arg2 (immediate).  Multiplying by 0 becomes movi 0;
 * by a power of two, a left shift.
 */
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
588
/*
 * ret = arg1 / arg2 (signed).  Prefer the native div op; else the div2
 * op with the high half sign-extended from arg1; else a helper call.
 */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* t0 = sign bits of arg1, i.e. the 64-bit dividend's high half. */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

/*
 * ret = arg1 % arg2 (signed).  Native rem op; else rem = arg1 - div * arg2;
 * else div2 (which also produces the remainder); else a helper call.
 */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        /* Note the swapped outputs: remainder lands in ret. */
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

/*
 * ret = arg1 / arg2 (unsigned).  As the signed case, but the div2
 * high half is zero.
 */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

/* ret = arg1 % arg2 (unsigned); same fallback ladder as tcg_gen_rem_i32. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
656
/* ret = arg1 & ~arg2; expands to not+and when no native andc. */
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

/* ret = ~(arg1 ^ arg2); expands to xor+not when no native eqv. */
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

/* ret = ~(arg1 & arg2); expands to and+not when no native nand. */
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

/* ret = ~(arg1 | arg2); expands to or+not when no native nor. */
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

/* ret = arg1 | ~arg2; expands to not+or when no native orc. */
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
710
/*
 * ret = clz(arg1), or arg2 when arg1 == 0.  Falls back to the 64-bit clz
 * (compensating for the 32 extra leading zeros of the zero-extended input),
 * or to a helper.
 */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        /* Bias the zero-input result so the final subi yields arg2. */
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        /* Remove the 32 leading zeros contributed by zero-extension. */
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

/* ret = clz(arg1), or the immediate arg2 when arg1 == 0. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

/*
 * ret = ctz(arg1), or arg2 when arg1 == 0.  Fallbacks, in order: 64-bit
 * ctz; ctpop of the mask of trailing zeros ((arg1 - 1) & ~arg1); clz of
 * the isolated lowest set bit; a helper call.
 */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* (arg1 - 1) & ~arg1 sets exactly the trailing-zero bits. */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            /* arg1 & -arg1 isolates the lowest set bit. */
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            /* ctz = 31 - clz(lowest bit), computed via xor with 31. */
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Select arg2 for the zero-input case. */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

/* ret = ctz(arg1), or the immediate arg2 when arg1 == 0. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
787
/*
 * ret = number of leading redundant sign bits of arg (excluding the sign
 * bit itself): xor with the sign mask turns them into leading zeros, then
 * clz - 1.
 */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

/*
 * ret = population count of arg1.  Falls back to the 64-bit ctpop of the
 * zero-extended value, or to a helper.
 */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
816
/*
 * ret = arg1 rotated left by arg2.  Without a native rotate, combine
 * (arg1 << arg2) | (arg1 >> (32 - arg2)).
 */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* ret = arg1 rotated left by the immediate arg2 (in [0, 31]). */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/*
 * ret = arg1 rotated right by arg2; mirror image of tcg_gen_rotl_i32's
 * shift-and-or fallback.
 */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* ret = arg1 rotated right by immediate arg2, i.e. rotl by 32 - arg2. */
void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
883
/*
 * Deposit the low LEN bits of arg2 into arg1 at bit offset OFS:
 * ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs).
 * Uses the native deposit op when valid, extract2 for edge positions,
 * otherwise an explicit mask-shift-or sequence.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Full-width deposit replaces arg1 entirely. */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Deposit into the top of the word. */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        /* Deposit into the bottom, then rotate into place. */
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift itself discards excess. */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
931
/*
 * Deposit the low LEN bits of ARG at offset OFS into a zero background:
 * ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field ends at the top bit: the shift alone clears the rest. */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at the bottom: a plain mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        /* Native deposit into a zero constant. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Generic fallback: mask, then shift into position. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
988
/*
 * Extract an unsigned LEN-bit field starting at bit OFS from ARG:
 * ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field touches the top bit: a logical shift extracts it. */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at the bottom: a mask extracts it. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Shift the field to the top, then logically back down. */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
1045
/*
 * Extract a signed LEN-bit field starting at bit OFS from ARG:
 * like tcg_gen_extract_i32, but the result is sign-extended from
 * the field's top bit.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field touches the top bit: an arithmetic shift extracts it. */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Generic fallback: shift the field to the top, arithmetic back down. */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
1113
1114 /*
1115 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
1116 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
1117 */
/*
 * Extract 32 bits from the 64-bit pair ah:al, starting at bit OFS:
 * ret = (ah:al >> ofs) truncated to 32 bits.  Unlike tcg_gen_extract_i32
 * above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Both halves equal: the extraction is just a rotate. */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Shift al down, then deposit the low bits of ah on top. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
1137
/* Conditional move: ret = (c1 cond c2) ? v1 : v2. */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Fallback via all-ones/all-zeros mask: (v1 & m) | (v2 & ~m). */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_negsetcond_i32(cond, t0, c1, c2);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1158
/* Double-word add: rh:rl = ah:al + bh:bl. */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback via a full 64-bit addition. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1175
/* Double-word subtract: rh:rl = ah:al - bh:bl. */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback via a full 64-bit subtraction. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1192
/* Unsigned widening multiply: rh:rl = arg1 * arg2 (full 64-bit product). */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Low via mul, high via muluh; temp guards rl aliasing an input. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* Widen to 64 bits, multiply, split the product. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit backends must provide mulu2 or muluh. */
        qemu_build_not_reached();
    }
}
1216
/* Signed widening multiply: rh:rl = arg1 * arg2 (full 64-bit product). */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Low via mul, high via mulsh; temp guards rl aliasing an input. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Unsigned product, then correct the high word for signed inputs. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply, split the product. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1256
/* Signed-by-unsigned widening multiply: rh:rl = (s32)arg1 * (u32)arg2. */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: sign-extend arg1, zero-extend arg2, multiply. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1283
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        /* Fallback: shift the byte to the top, then arithmetic shift back. */
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}
1293
/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        /* Fallback: shift the halfword to the top, then shift back. */
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}
1303
/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}
1312
/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
1321
1322 /*
1323 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1324 *
1325 * Byte pattern: xxab -> yyba
1326 *
1327 * With TCG_BSWAP_IZ, x == zero, else undefined.
1328 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1329 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        /* Fallback: assemble the swapped halfword from shifts and masks. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);  /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);    /* ret = ..ba (OZ) */
                                        /*     = ssba (OS) */
                                        /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1364
1365 /*
1366 * bswap32_i32: 32-bit byte swap on a 32-bit value.
1367 *
1368 * Byte pattern: abcd -> dcba
1369 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        /* Fallback: swap bytes within halfwords, then swap the halfwords. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1394
1395 /*
1396 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1397 *
1398 * Byte pattern: abcd -> cdab
1399 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1405
/* Signed minimum: ret = min(a, b) as signed 32-bit. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1410
/* Unsigned minimum: ret = min(a, b) as unsigned 32-bit. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1415
/* Signed maximum: ret = max(a, b) as signed 32-bit. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1420
/* Unsigned maximum: ret = max(a, b) as unsigned 32-bit. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1425
/* Absolute value via branchless (a ^ sign) - sign idiom. */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);       /* t = 0 or -1 (sign mask) */
    tcg_gen_xor_i32(ret, a, t);       /* conditional complement */
    tcg_gen_sub_i32(ret, ret, t);     /* conditional +1 */
    tcg_temp_free_i32(t);
}
1435
/* Load a zero-extended 8-bit value from host address arg2 + offset. */
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}
1440
/* Load a sign-extended 8-bit value from host address arg2 + offset. */
void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}
1445
/* Load a zero-extended 16-bit value from host address arg2 + offset. */
void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}
1450
/* Load a sign-extended 16-bit value from host address arg2 + offset. */
void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}
1455
/* Load a 32-bit value from host address arg2 + offset. */
void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}
1460
/* Store the low 8 bits of arg1 to host address arg2 + offset. */
void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}
1465
/* Store the low 16 bits of arg1 to host address arg2 + offset. */
void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}
1470
/* Store the 32-bit value arg1 to host address arg2 + offset. */
void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
1475
1476
1477 /* 64-bit ops */
1478
/* Mark a 64-bit temp as dead (value no longer needed). */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op1_i64(INDEX_op_discard, arg);
    } else {
        /* 32-bit host: discard both component halves. */
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    }
}
1488
/* Copy a 64-bit value: ret = arg.  No-op when ret and arg are the same. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}
1508
/* Load the 64-bit immediate arg into ret. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    } else {
        /* 32-bit host: load each half as a 32-bit immediate. */
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    }
}
1518
/* Load a zero-extended 8-bit value into a 64-bit temp. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: value fits the low half; zero the high half. */
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1528
/* Load a sign-extended 8-bit value into a 64-bit temp. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: replicate the sign into the high half. */
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1538
/* Load a zero-extended 16-bit value into a 64-bit temp. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: value fits the low half; zero the high half. */
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1548
/* Load a sign-extended 16-bit value into a 64-bit temp. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: replicate the sign into the high half. */
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1558
/* Load a zero-extended 32-bit value into a 64-bit temp. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half; zero the high half. */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1568
/* Load a sign-extended 32-bit value into a 64-bit temp. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
    } else {
        /* 32-bit host: replicate the sign into the high half. */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1578
/* Load a full 64-bit value from host address arg2 + offset. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Big-endian host: the high half is at the lower address. */
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
1595
/* Store the low 8 bits of arg1 to host address arg2 + offset. */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
    } else {
        /* 32-bit host: only the low half contributes. */
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1604
/* Store the low 16 bits of arg1 to host address arg2 + offset. */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
    } else {
        /* 32-bit host: only the low half contributes. */
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1613
/* Store the low 32 bits of arg1 to host address arg2 + offset. */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
    } else {
        /* 32-bit host: only the low half contributes. */
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1622
/* Store the full 64-bit value arg1 to host address arg2 + offset. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Big-endian host: the high half goes to the lower address. */
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}
1635
/* 64-bit addition; expanded to add2 on 32-bit hosts. */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1645
/* 64-bit subtraction; expanded to sub2 on 32-bit hosts. */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1655
/* 64-bit bitwise AND; per-half on 32-bit hosts. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}
1665
/* 64-bit bitwise OR; per-half on 32-bit hosts. */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}
1675
/* 64-bit bitwise XOR; per-half on 32-bit hosts. */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}
1685
/* 64-bit variable left shift; helper call on 32-bit hosts. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
    } else {
        gen_helper_shl_i64(ret, arg1, arg2);
    }
}
1694
/* 64-bit variable logical right shift; helper call on 32-bit hosts. */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
    } else {
        gen_helper_shr_i64(ret, arg1, arg2);
    }
}
1703
/* 64-bit variable arithmetic right shift; helper call on 32-bit hosts. */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
    } else {
        gen_helper_sar_i64(ret, arg1, arg2);
    }
}
1712
1713 void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1714 {
1715 TCGv_i64 t0;
1716 TCGv_i32 t1;
1717
1718 if (TCG_TARGET_REG_BITS == 64) {
1719 tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
1720 return;
1721 }
1722
1723
1724 t0 = tcg_temp_ebb_new_i64();
1725 t1 = tcg_temp_ebb_new_i32();
1726
1727 tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
1728 TCGV_LOW(arg1), TCGV_LOW(arg2));
1729
1730 tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
1731 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
1732 tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
1733 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
1734
1735 tcg_gen_mov_i64(ret, t0);
1736 tcg_temp_free_i64(t0);
1737 tcg_temp_free_i32(t1);
1738 }
1739
/* Add a 64-bit immediate: ret = arg1 + arg2. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: add2 with the constant split into halves. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1753
/* Subtract from an immediate: ret = arg1 - arg2. */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: sub2 with the constant split into halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1767
/* Subtract a 64-bit immediate: ret = arg1 - arg2. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: sub2 with the constant split into halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1781
/* Two's-complement negation: ret = -arg. */
void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_neg_i64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        /* subfi handles the 32-bit-host expansion without recursing. */
        tcg_gen_subfi_i64(ret, 0, arg);
    }
}
1790
/* AND with a 64-bit immediate, recognizing mask patterns as extensions. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1830
/* OR with a 64-bit immediate. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        /* OR with all-ones saturates. */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1847
/* XOR with a 64-bit immediate. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1865
/*
 * Shift a 64-bit value by constant c on a 32-bit host, operating on the
 * low/high 32-bit halves.  right selects right vs left shift; arith
 * selects arithmetic vs logical for right shifts.
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half is a shifted copy, the other filler. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* Low result combines bits from both halves. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* Left shift: high result combines bits from both halves. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1915
/* Left shift by a constant in [0, 64). */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1927
/* Logical right shift by a constant in [0, 64). */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1939
/* Arithmetic right shift by a constant in [0, 64). */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1951
/* Branch to label l when (arg1 cond arg2) holds. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: a double-word compare-and-branch. */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        add_last_as_label_use(l);
    }
}
1968
/* Branch to label l when (arg1 cond imm) holds. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: brcond2 with the constant split into halves. */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
1984
/* Set ret to 1 when (arg1 cond arg2) holds, else 0. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 produces the 32-bit flag; clear the high half. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
2003
/* Set ret to 1 when (arg1 cond imm) holds, else 0. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: setcond2 with the constant split into halves. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
2021
/* Set ret to -1 when (arg1 cond imm) holds, else 0. */
void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
                             TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
}
2027
/* Set ret to -1 when (arg1 cond arg2) holds, else 0. */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* setcond2 gives 0/1; negate and replicate into the high half. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
2048
/* Multiply by a 64-bit immediate; powers of two become shifts. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2059
/* Signed 64-bit division: ret = arg1 / arg2. */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 wants the sign-extended high word of the dividend. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
2073
/* Signed 64-bit remainder: ret = arg1 % arg2. */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Fallback: arg1 - (arg1 / arg2) * arg2. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 yields the remainder in its second output. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
2093
/* Unsigned 64-bit division: ret = arg1 / arg2. */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 wants a zero high word for the dividend. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
2107
/* Unsigned 64-bit remainder: ret = arg1 % arg2. */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Fallback: arg1 - (arg1 / arg2) * arg2. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 yields the remainder in its second output. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
2127
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half; replicate the sign into the high. */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Fallback: shift the byte to the top, then shift back. */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
2140
/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half; replicate the sign into the high. */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Fallback: shift the halfword to the top, then shift back. */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
2153
/* Sign-extend the low 32 bits of ARG into RET. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low word; the high word becomes its sign. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Shift the word to the top, then arithmetic-shift back down. */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
2166
2167 void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
2168 {
2169 if (TCG_TARGET_REG_BITS == 32) {
2170 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2171 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2172 } else if (TCG_TARGET_HAS_ext8u_i64) {
2173 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
2174 } else {
2175 tcg_gen_andi_i64(ret, arg, 0xffu);
2176 }
2177 }
2178
2179 void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
2180 {
2181 if (TCG_TARGET_REG_BITS == 32) {
2182 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2183 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2184 } else if (TCG_TARGET_HAS_ext16u_i64) {
2185 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
2186 } else {
2187 tcg_gen_andi_i64(ret, arg, 0xffffu);
2188 }
2189 }
2190
2191 void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
2192 {
2193 if (TCG_TARGET_REG_BITS == 32) {
2194 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2195 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2196 } else if (TCG_TARGET_HAS_ext32u_i64) {
2197 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
2198 } else {
2199 tcg_gen_andi_i64(ret, arg, 0xffffffffu);
2200 }
2201 }
2202
2203 /*
2204 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
2205 *
2206 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
2207 *
2208 * With TCG_BSWAP_IZ, x == zero, else undefined.
2209 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2210 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low word; fill the high word per the output flag. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        /* Generic expansion: build each output byte with shifts/masks. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            /* Input high bits unknown: discard them explicitly. */
            tcg_gen_ext8u_i64(t0, t0);  /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            /* Position byte b at the top and sign-extend while shifting. */
            tcg_gen_shli_i64(t1, arg, 56);    /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);     /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);       /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);      /*  t1 = ......b. */
        } else {
            tcg_gen_shli_i64(t1, arg, 8);     /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);          /* ret = ......ba (OZ) */
                                              /*       ssssssba (OS) */
                                              /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2252
2253 /*
2254 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
2255 *
2256 * Byte pattern: xxxxabcd -> yyyydcba
2257 *
2258 * With TCG_BSWAP_IZ, x == zero, else undefined.
2259 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2260 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low word; fill the high word per the output flag. */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        /* t2 is a constant temp; it must not (and need not) be freed. */
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

        /* Swap adjacent bytes within each halfword. */
                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        /* Swap the two halfwords, extending per the output flag. */
        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2301
2302 /*
2303 * bswap64_i64: 64-bit byte swap on a 64-bit value.
2304 *
2305 * Byte pattern: abcdefgh -> hgfedcba
2306 */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap bytes within each word, then exchange the two words. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /* Generic expansion: three swap stages of increasing granularity. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

        /* Stage 1: swap adjacent bytes. */
                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        /* Stage 2: swap adjacent halfwords. */
        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        /* Stage 3: swap the two words. */
        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
2351
2352 /*
2353 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
2354 * See also include/qemu/bitops.h, hswap64.
2355 *
2356 * Byte pattern: abcdefgh -> ghefcdab
2357 */
2358 void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
2359 {
2360 uint64_t m = 0x0000ffff0000ffffull;
2361 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2362 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2363
2364 /* arg = abcdefgh */
2365 tcg_gen_rotli_i64(t1, arg, 32); /* t1 = efghabcd */
2366 tcg_gen_andi_i64(t0, t1, m); /* t0 = ..gh..cd */
2367 tcg_gen_shli_i64(t0, t0, 16); /* t0 = gh..cd.. */
2368 tcg_gen_shri_i64(t1, t1, 16); /* t1 = ..efghab */
2369 tcg_gen_andi_i64(t1, t1, m); /* t1 = ..ef..ab */
2370 tcg_gen_or_i64(ret, t0, t1); /* ret = ghefcdab */
2371
2372 tcg_temp_free_i64(t0);
2373 tcg_temp_free_i64(t1);
2374 }
2375
2376 /*
2377 * wswap_i64: Swap 32-bit words within a 64-bit value.
2378 *
2379 * Byte pattern: abcdefgh -> efghabcd
2380 */
2381 void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
2382 {
2383 /* Swapping 2 32-bit elements is a rotate. */
2384 tcg_gen_rotli_i64(ret, arg, 32);
2385 }
2386
2387 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
2388 {
2389 if (TCG_TARGET_REG_BITS == 32) {
2390 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2391 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
2392 } else if (TCG_TARGET_HAS_not_i64) {
2393 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
2394 } else {
2395 tcg_gen_xori_i64(ret, arg, -1);
2396 }
2397 }
2398
2399 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2400 {
2401 if (TCG_TARGET_REG_BITS == 32) {
2402 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2403 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2404 } else if (TCG_TARGET_HAS_andc_i64) {
2405 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
2406 } else {
2407 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2408 tcg_gen_not_i64(t0, arg2);
2409 tcg_gen_and_i64(ret, arg1, t0);
2410 tcg_temp_free_i64(t0);
2411 }
2412 }
2413
2414 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2415 {
2416 if (TCG_TARGET_REG_BITS == 32) {
2417 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2418 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2419 } else if (TCG_TARGET_HAS_eqv_i64) {
2420 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
2421 } else {
2422 tcg_gen_xor_i64(ret, arg1, arg2);
2423 tcg_gen_not_i64(ret, ret);
2424 }
2425 }
2426
2427 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2428 {
2429 if (TCG_TARGET_REG_BITS == 32) {
2430 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2431 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2432 } else if (TCG_TARGET_HAS_nand_i64) {
2433 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2434 } else {
2435 tcg_gen_and_i64(ret, arg1, arg2);
2436 tcg_gen_not_i64(ret, ret);
2437 }
2438 }
2439
2440 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2441 {
2442 if (TCG_TARGET_REG_BITS == 32) {
2443 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2444 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2445 } else if (TCG_TARGET_HAS_nor_i64) {
2446 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2447 } else {
2448 tcg_gen_or_i64(ret, arg1, arg2);
2449 tcg_gen_not_i64(ret, ret);
2450 }
2451 }
2452
2453 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2454 {
2455 if (TCG_TARGET_REG_BITS == 32) {
2456 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2457 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2458 } else if (TCG_TARGET_HAS_orc_i64) {
2459 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2460 } else {
2461 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2462 tcg_gen_not_i64(t0, arg2);
2463 tcg_gen_or_i64(ret, arg1, t0);
2464 tcg_temp_free_i64(t0);
2465 }
2466 }
2467
2468 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2469 {
2470 if (TCG_TARGET_HAS_clz_i64) {
2471 tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2472 } else {
2473 gen_helper_clz_i64(ret, arg1, arg2);
2474 }
2475 }
2476
/* Count leading zeros of ARG1, with immediate ARG2 as the zero-input result. */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* If the high word is zero, the answer is 32 + clz(low),
           with arg2 - 32 correcting for the +32 applied below. */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        /* Otherwise clz(high) wins; T is the fallback for high == 0. */
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2492
2493 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2494 {
2495 if (TCG_TARGET_HAS_ctz_i64) {
2496 tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
2497 } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
2498 TCGv_i64 z, t = tcg_temp_ebb_new_i64();
2499
2500 if (TCG_TARGET_HAS_ctpop_i64) {
2501 tcg_gen_subi_i64(t, arg1, 1);
2502 tcg_gen_andc_i64(t, t, arg1);
2503 tcg_gen_ctpop_i64(t, t);
2504 } else {
2505 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
2506 tcg_gen_neg_i64(t, arg1);
2507 tcg_gen_and_i64(t, t, arg1);
2508 tcg_gen_clzi_i64(t, t, 64);
2509 tcg_gen_xori_i64(t, t, 63);
2510 }
2511 z = tcg_constant_i64(0);
2512 tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
2513 tcg_temp_free_i64(t);
2514 tcg_temp_free_i64(z);
2515 } else {
2516 gen_helper_ctz_i64(ret, arg1, arg2);
2517 }
2518 }
2519
/* Count trailing zeros of ARG1, with immediate ARG2 as the zero-input result. */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /* If the low word is zero, the answer is 32 + ctz(high),
           with arg2 - 32 correcting for the +32 applied below. */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        /* Otherwise ctz(low) wins; T32 is the fallback for low == 0. */
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup:
           ctpop((x - 1) & ~x) is already 64 when x == 0. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2544
/* Count leading redundant sign bits (sign bits beyond the first). */
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        /* Xor with the sign mask turns sign bits into leading zeros;
           clz then counts them (including the sign bit itself). */
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        /* Exclude the sign bit from the count. */
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
2558
/* Population count: number of set bits in ARG1. */
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        /* Count each half and sum; the result fits in the low word. */
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
2572
/* Rotate ARG1 left by ARG2 (0..63) bits. */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2589
/* Rotate ARG1 left by the immediate ARG2 (0..63) bits. */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotation by zero is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2609
/* Rotate ARG1 right by ARG2 (0..63) bits. */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 >> arg2) | (arg1 << (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2626
2627 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2628 {
2629 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2630 /* some cases can be optimized here */
2631 if (arg2 == 0) {
2632 tcg_gen_mov_i64(ret, arg1);
2633 } else {
2634 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2635 }
2636 }
2637
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit position OFS,
 * leaving the other bits of ARG1 unchanged, result in RET.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        /* The field covers the whole word: plain move. */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* A field contained in one 32-bit half is a 32-bit deposit. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* A field abutting either end of the word maps onto extract2. */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: position the field, clear the hole, then OR. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The shift discards the excess high bits; no pre-mask needed. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2700
/*
 * Deposit the low LEN bits of ARG at bit position OFS into a zero
 * background: ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field abuts the top: the shift discards all excess bits. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field abuts the bottom: a plain mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        /* Deposit into a constant-zero first operand. */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* A field contained in one half is a 32-bit deposit_z. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Generic fallback: mask then shift into position. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2784
/*
 * Extract LEN bits at position OFS from ARG, zero-extended, into RET:
 * ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 64) {
        /* Field abuts the top: a plain shift suffices. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field abuts the bottom: a plain mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words. */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words. One double-word
           shift is better than two double-word shifts. */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u. */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        /* Shift down then mask. */
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Push the field to the top, then shift back down (zero-fill). */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2866
/*
 * Extract LEN bits at position OFS from ARG, sign-extended, into RET.
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported. */
    if (ofs + len == 64) {
        /* Field abuts the top: a plain arithmetic shift suffices. */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field abuts the bottom at a natural width: use sign-extend. */
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words. */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            /* Field covers the low word and part of the high word. */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally. */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part. */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part. */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB. */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending. */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits. */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Generic fallback: push to the top, then arithmetic-shift down. */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2982
2983 /*
2984 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2985 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2986 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        /* Entirely the low part. */
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        /* Entirely the high part. */
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        /* Identical halves: the extract is a rotate of one value. */
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        /* Shift the low part down, then deposit the high part on top. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
3006
/* Conditional move: ret = (c1 COND c2) ? v1 : v2. */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        /* Evaluate the 64-bit comparison into a 32-bit 0/1 flag. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on the flag being non-zero. */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn the 0/1 flag into an all-zeros/all-ones mask and
               blend: ret = (v1 & mask) | (v2 & ~mask), per half. */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Blend via an all-zeros/all-ones mask from negsetcond. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
3053
/* Double-word add: (rh:rl) = (ah:al) + (bh:bl). */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        /* Unsigned overflow of the low add (sum < addend) is the carry. */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        /* Write RL last so it may alias the inputs. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
3071
/* Double-word subtract: (rh:rl) = (ah:al) - (bh:bl). */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* The borrow is al < bl, unsigned. */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write RL last so it may alias the inputs. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
3089
/* Unsigned 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2. */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low-half and high-half multiply ops. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        /* Write RL last so it may alias the inputs. */
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High half via helper, low half inline. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3108
/* Signed 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2. */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Separate low-half and high-half multiply ops. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Unsigned multiply, then correct the high half:
           signed_high = unsigned_high - (a1<0 ? a2 : 0) - (a2<0 ? a1 : 0). */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        /* Write RL last so it may alias the inputs. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* High half via helper, low half inline. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3145
3146 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3147 {
3148 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3149 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
3150 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
3151 tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
3152 /* Adjust for negative input for the signed arg1. */
3153 tcg_gen_sari_i64(t2, arg1, 63);
3154 tcg_gen_and_i64(t2, t2, arg2);
3155 tcg_gen_sub_i64(rh, t1, t2);
3156 tcg_gen_mov_i64(rl, t0);
3157 tcg_temp_free_i64(t0);
3158 tcg_temp_free_i64(t1);
3159 tcg_temp_free_i64(t2);
3160 }
3161
/* ret = signed minimum of a and b, via a conditional move. */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
3166
/* ret = unsigned minimum of a and b, via a conditional move. */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
3171
/* ret = signed maximum of a and b, via a conditional move. */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
3176
/* ret = unsigned maximum of a and b, via a conditional move. */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
3181
/*
 * ret = |a|, using the branch-free sign-mask identity:
 * with t = a >> 63 (arithmetic, so all-ones when a < 0, else zero),
 * |a| = (a ^ t) - t.  Note INT64_MIN maps to itself, as usual.
 */
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}
3191
3192 /* Size changing operations. */
3193
/* Extract the low 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* i64 is modeled as a register pair; just copy the low half. */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* On 64-bit hosts without the op, an i64 temp can be viewed
           directly as its low-half i32. */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
3205
/* Extract the high 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* i64 is modeled as a register pair; just copy the high half. */
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Shift the high half down, then view the temp as an i32. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
3220
/* Zero-extend a 32-bit value into a 64-bit temp. */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy the low half, clear the high half. */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3231
/* Sign-extend a 32-bit value into a 64-bit temp. */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy the low half, then fill the high half
           with copies of the sign bit. */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3242
/* Build a 64-bit value from two 32-bit halves: dest = high:low. */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: the halves map directly onto the pair. */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        /* dest already has zeros above bit 31, so shift + or suffices. */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
3268
/* Split a 64-bit value into two 32-bit halves: lo = arg[31:0],
   hi = arg[63:32]. */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: both halves are directly addressable. */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
3279
/* Split a 64-bit value into two 64-bit temps holding the zero-extended
   low and high 32-bit halves. */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
3285
/* ret = hi[31:0]:lo[31:0] — combine the low halves of two i64 temps. */
void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}
3290
/* Split a 128-bit temp into its low and high 64-bit halves. */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
3296
/* Build a 128-bit temp from two 64-bit halves: ret = hi:lo. */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
3302
3303 void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
3304 {
3305 if (dst != src) {
3306 tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
3307 tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
3308 }
3309 }
3310
3311 void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
3312 {
3313 if (HOST_BIG_ENDIAN) {
3314 tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
3315 tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
3316 } else {
3317 tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
3318 tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
3319 }
3320 }
3321
3322 void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
3323 {
3324 if (HOST_BIG_ENDIAN) {
3325 tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
3326 tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
3327 } else {
3328 tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
3329 tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
3330 }
3331 }
3332
3333 /* QEMU specific operations. */
3334
/*
 * Emit code to leave the translation block and return to the exec loop.
 * @tb may be NULL for a plain exit; otherwise @idx is either a goto_tb
 * slot number (<= TB_EXIT_IDXMAX) or TB_EXIT_REQUESTED for the exitreq
 * path.  The tb pointer and exit index are packed into one word, which
 * the jit code returns to the caller.
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb. */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
3365
/*
 * Emit a direct-chaining jump to slot @idx of the current TB.
 * Must be followed by a matching tcg_gen_exit_tb for the same slot.
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    /* Chained jumps bypass instrumentation; drop any pending helpers. */
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
3380
/*
 * Emit code that looks up the TB matching the current CPU state and
 * jumps to it directly, falling back to a plain exit-to-main-loop
 * when goto_ptr is disabled for this TB.
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    /* Indirect jumps bypass instrumentation; drop any pending helpers. */
    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    /* The helper returns the next host code pointer for this cpu state. */
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}