/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"


/*
 * Encourage the compiler to tail-call to a function, rather than inlining.
 * Minimizes code size across 99 bottles of beer on the wall.
 */
#define NI __attribute__((noinline))

void NI tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void NI tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void NI tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void NI tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void NI tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void NI tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

/*
 * With CONFIG_DEBUG_TCG, tcgv_*_tmp, via tcgv_*_arg, performs an
 * out-of-line assertion check.  Force tail calls to avoid too much
 * code expansion.
 */
#ifdef CONFIG_DEBUG_TCG
# define DNI NI
#else
# define DNI
#endif

void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 a1)
{
    tcg_gen_op1(opc, tcgv_i32_arg(a1));
}

void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 a1)
{
    tcg_gen_op1(opc, tcgv_i64_arg(a1));
}

void DNI tcg_gen_op1i(TCGOpcode opc, TCGArg a1)
{
    tcg_gen_op1(opc, a1);
}

void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
{
    tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
}

void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
{
    tcg_gen_op2(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
}

void DNI tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 a1, TCGArg a2)
{
    tcg_gen_op2(opc, tcgv_i32_arg(a1), a2);
}

void DNI tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 a1, TCGArg a2)
{
    tcg_gen_op2(opc, tcgv_i64_arg(a1), a2);
}

void DNI tcg_gen_op2ii(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    tcg_gen_op2(opc, a1, a2);
}

void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGv_i32 a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), tcgv_i32_arg(a3));
}

void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGv_i64 a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), tcgv_i64_arg(a3));
}

void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
}

void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
}

void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                             TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_ptr_arg(base), offset);
}

void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                             TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_ptr_arg(base), offset);
}

void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                         TCGv_i32 a3, TCGv_i32 a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4));
}

void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                         TCGv_i64 a3, TCGv_i64 a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4));
}

void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                          TCGv_i32 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4);
}

void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                          TCGv_i64 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4);
}

void DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                           TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
}

void DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                           TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
}

void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                         TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
}

void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                         TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
}

void DNI tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                          TCGv_i32 a3, TCGv_i32 a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5);
}

void DNI tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                          TCGv_i64 a3, TCGv_i64 a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), a5);
}

void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                           TCGv_i32 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4, a5);
}

void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                           TCGv_i64 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4, a5);
}

void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGv_i32 a3,
                         TCGv_i32 a4, TCGv_i32 a5, TCGv_i32 a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
                tcgv_i32_arg(a6));
}

void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGv_i64 a3,
                         TCGv_i64 a4, TCGv_i64 a5, TCGv_i64 a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
                tcgv_i64_arg(a6));
}

void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGv_i32 a3,
                          TCGv_i32 a4, TCGv_i32 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
}

void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGv_i64 a3,
                          TCGv_i64 a4, TCGv_i64 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
}

void DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                           TCGv_i32 a3, TCGv_i32 a4, TCGArg a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
}

void DNI tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                           TCGv_i64 a3, TCGv_i64 a4, TCGArg a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), a5, a6);
}

/* Generic ops. */

void gen_set_label(TCGLabel *l)
{
    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, label_arg(l));
}

static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}

void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single CPU we have I/O threads running in
     * parallel, and lack of memory ordering can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

void tcg_gen_plugin_cb_start(unsigned from, unsigned type, unsigned wr)
{
    tcg_gen_op3(INDEX_op_plugin_cb_start, from, type, wr);
}

void tcg_gen_plugin_cb_end(void)
{
    tcg_emit_op(INDEX_op_plugin_cb_end, 0);
}

/* 32-bit ops */

void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (ret != arg) {
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
    }
}

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_neg_i32) {
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
    } else {
        tcg_gen_subfi_i32(ret, 0, arg);
    }
}

void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}

void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}

void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
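        /* The div2 opcode takes a 64-bit dividend in two halves;
           sign-extend arg1 into the high half. */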
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
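        /* rem(a, b) == a - (a / b) * b. */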
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
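        /* Zero-extending to 64 bits adds 32 leading zeros, so bias the
           fallback value by 32 as well, then subtract 32 from the result. */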
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
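            /* ctz(x) == ctpop((x - 1) & ~x): turn the trailing zeros
               into ones, clear everything else, and count. */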
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
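        /* clrsb(a) == clz(a ^ (a >> 31)) - 1: xoring with the sign mask
           converts the redundant sign bits into leading zeros. */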
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

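        /* rotl(x, n) == (x << n) | (x >> (32 - n)). */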
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
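        /* When the field abuts either end of the word, extract2 can
           combine the two operands directly (plus a rotate for ofs == 0). */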
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u. */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
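        /* Build an all-ones/all-zeros mask from the condition, then
           select bitwise: (v1 & mask) | (v2 & ~mask). */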
        tcg_gen_negsetcond_i32(cond, t0, c1, c2);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
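        /* The unsigned product overcounts by arg2 * 2^32 when arg1 < 0,
           and by arg1 * 2^32 when arg2 < 0; subtract both from the
           high part. */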
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/*
 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
 *
 * Byte pattern: xxab -> yyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);      /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);     /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24); /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);  /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);    /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);   /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);  /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);       /* ret = ..ba (OZ) */
                                           /*     = ssba (OS) */
                                           /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/*
 * bswap32_i32: 32-bit byte swap on a 32-bit value.
 *
 * Byte pattern: abcd -> dcba
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/*
 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
 *
 * Byte pattern: abcd -> cdab
 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

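    /* abs(a) == (a ^ (a >> 31)) - (a >> 31): conditionally negate
       via the sign mask. */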
    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}

void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}


/* 64-bit ops */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op1_i64(INDEX_op_discard, arg);
    } else {
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    }
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    } else {
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    }
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}

void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}

void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
    } else {
        gen_helper_shl_i64(ret, arg1, arg2);
    }
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
    } else {
        gen_helper_shr_i64(ret, arg1, arg2);
    }
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
    } else {
        gen_helper_sar_i64(ret, arg1, arg2);
    }
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
        return;
    }

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

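    /*
     * Schoolbook multiply on a 32-bit host: a full 32x32 -> 64 product
     * of the low halves, plus the two cross products added into the
     * high half.  The high*high term only affects bits >= 64 and is
     * dropped.
     */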
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_neg_i64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        tcg_gen_subfi_i64(ret, 0, arg);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

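/*
 * Expand a 64-bit constant shift on a 32-bit host.  Shifts by 32 or
 * more move whole words; shorter shifts combine the two halves, using
 * extract2/deposit to funnel bits across the word boundary.
 */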
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
2039 tcg_gen_movi_i64(ret, 0);
2040 } else {
2041 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
2042 TCGV_LOW(arg1), TCGV_HIGH(arg1),
2043 tcg_constant_i32(arg2),
2044 tcg_constant_i32(arg2 >> 32), cond);
2045 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2046 }
2047 }
2048
2049 void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
2050 TCGv_i64 arg1, int64_t arg2)
2051 {
2052 tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
2053 }
2054
2055 void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
2056 TCGv_i64 arg1, TCGv_i64 arg2)
2057 {
2058 if (cond == TCG_COND_ALWAYS) {
2059 tcg_gen_movi_i64(ret, -1);
2060 } else if (cond == TCG_COND_NEVER) {
2061 tcg_gen_movi_i64(ret, 0);
2062 } else if (TCG_TARGET_HAS_negsetcond_i64) {
2063 tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
2064 } else if (TCG_TARGET_REG_BITS == 32) {
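/* setcond2 leaves 0/1 in the low word; negate it to 0/-1 and
   replicate into the high word. */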
2065 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
2066 TCGV_LOW(arg1), TCGV_HIGH(arg1),
2067 TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
2068 tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
2069 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
2070 } else {
2071 tcg_gen_setcond_i64(cond, ret, arg1, arg2);
2072 tcg_gen_neg_i64(ret, ret);
2073 }
2074 }
2075
2076 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2077 {
2078 if (arg2 == 0) {
2079 tcg_gen_movi_i64(ret, 0);
2080 } else if (is_power_of_2(arg2)) {
2081 tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
2082 } else {
2083 tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
2084 }
2085 }
2086
2087 void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2088 {
2089 if (TCG_TARGET_HAS_div_i64) {
2090 tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
2091 } else if (TCG_TARGET_HAS_div2_i64) {
2092 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
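/* The div2 op divides the double-word dividend t0:arg1, producing
   both quotient and remainder, so sign-extend arg1 into t0 first. */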
2093 tcg_gen_sari_i64(t0, arg1, 63);
2094 tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
2095 tcg_temp_free_i64(t0);
2096 } else {
2097 gen_helper_div_i64(ret, arg1, arg2);
2098 }
2099 }
2100
2101 void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2102 {
2103 if (TCG_TARGET_HAS_rem_i64) {
2104 tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
2105 } else if (TCG_TARGET_HAS_div_i64) {
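/* No remainder op: compute arg1 - (arg1 / arg2) * arg2. */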
2106 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2107 tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
2108 tcg_gen_mul_i64(t0, t0, arg2);
2109 tcg_gen_sub_i64(ret, arg1, t0);
2110 tcg_temp_free_i64(t0);
2111 } else if (TCG_TARGET_HAS_div2_i64) {
2112 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2113 tcg_gen_sari_i64(t0, arg1, 63);
2114 tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
2115 tcg_temp_free_i64(t0);
2116 } else {
2117 gen_helper_rem_i64(ret, arg1, arg2);
2118 }
2119 }
2120
2121 void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2122 {
2123 if (TCG_TARGET_HAS_div_i64) {
2124 tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
2125 } else if (TCG_TARGET_HAS_div2_i64) {
2126 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2127 TCGv_i64 zero = tcg_constant_i64(0);
2128 tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
2129 tcg_temp_free_i64(t0);
2130 } else {
2131 gen_helper_divu_i64(ret, arg1, arg2);
2132 }
2133 }
2134
2135 void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2136 {
2137 if (TCG_TARGET_HAS_rem_i64) {
2138 tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
2139 } else if (TCG_TARGET_HAS_div_i64) {
2140 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2141 tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
2142 tcg_gen_mul_i64(t0, t0, arg2);
2143 tcg_gen_sub_i64(ret, arg1, t0);
2144 tcg_temp_free_i64(t0);
2145 } else if (TCG_TARGET_HAS_div2_i64) {
2146 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2147 TCGv_i64 zero = tcg_constant_i64(0);
2148 tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
2149 tcg_temp_free_i64(t0);
2150 } else {
2151 gen_helper_remu_i64(ret, arg1, arg2);
2152 }
2153 }
2154
2155 void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
2156 {
2157 if (TCG_TARGET_REG_BITS == 32) {
2158 tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2159 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2160 } else if (TCG_TARGET_HAS_ext8s_i64) {
2161 tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
2162 } else {
2163 tcg_gen_shli_i64(ret, arg, 56);
2164 tcg_gen_sari_i64(ret, ret, 56);
2165 }
2166 }
2167
2168 void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
2169 {
2170 if (TCG_TARGET_REG_BITS == 32) {
2171 tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2172 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2173 } else if (TCG_TARGET_HAS_ext16s_i64) {
2174 tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
2175 } else {
2176 tcg_gen_shli_i64(ret, arg, 48);
2177 tcg_gen_sari_i64(ret, ret, 48);
2178 }
2179 }
2180
2181 void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
2182 {
2183 if (TCG_TARGET_REG_BITS == 32) {
2184 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2185 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2186 } else if (TCG_TARGET_HAS_ext32s_i64) {
2187 tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
2188 } else {
2189 tcg_gen_shli_i64(ret, arg, 32);
2190 tcg_gen_sari_i64(ret, ret, 32);
2191 }
2192 }
2193
2194 void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
2195 {
2196 if (TCG_TARGET_REG_BITS == 32) {
2197 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2198 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2199 } else if (TCG_TARGET_HAS_ext8u_i64) {
2200 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
2201 } else {
2202 tcg_gen_andi_i64(ret, arg, 0xffu);
2203 }
2204 }
2205
2206 void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
2207 {
2208 if (TCG_TARGET_REG_BITS == 32) {
2209 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2210 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2211 } else if (TCG_TARGET_HAS_ext16u_i64) {
2212 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
2213 } else {
2214 tcg_gen_andi_i64(ret, arg, 0xffffu);
2215 }
2216 }
2217
2218 void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
2219 {
2220 if (TCG_TARGET_REG_BITS == 32) {
2221 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2222 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2223 } else if (TCG_TARGET_HAS_ext32u_i64) {
2224 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
2225 } else {
2226 tcg_gen_andi_i64(ret, arg, 0xffffffffu);
2227 }
2228 }
2229
2230 /*
2231 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
2232 *
2233 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
2234 *
2235 * With TCG_BSWAP_IZ, x == zero, else undefined.
2236 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2237 */
2238 void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
2239 {
2240 /* Only one extension flag may be present. */
2241 tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
2242
2243 if (TCG_TARGET_REG_BITS == 32) {
2244 tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
2245 if (flags & TCG_BSWAP_OS) {
2246 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2247 } else {
2248 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2249 }
2250 } else if (TCG_TARGET_HAS_bswap16_i64) {
2251 tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
2252 } else {
2253 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2254 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2255
2256 /* arg = ......ab or xxxxxxab */
2257 tcg_gen_shri_i64(t0, arg, 8); /* t0 = .......a or .xxxxxxa */
2258 if (!(flags & TCG_BSWAP_IZ)) {
2259 tcg_gen_ext8u_i64(t0, t0); /* t0 = .......a */
2260 }
2261
2262 if (flags & TCG_BSWAP_OS) {
2263 tcg_gen_shli_i64(t1, arg, 56); /* t1 = b....... */
2264 tcg_gen_sari_i64(t1, t1, 48); /* t1 = ssssssb. */
2265 } else if (flags & TCG_BSWAP_OZ) {
2266 tcg_gen_ext8u_i64(t1, arg); /* t1 = .......b */
2267 tcg_gen_shli_i64(t1, t1, 8); /* t1 = ......b. */
2268 } else {
2269 tcg_gen_shli_i64(t1, arg, 8); /* t1 = xxxxxab. */
2270 }
2271
2272 tcg_gen_or_i64(ret, t0, t1); /* ret = ......ba (OZ) */
2273 /* ssssssba (OS) */
2274 /* xxxxxaba (no flag) */
2275 tcg_temp_free_i64(t0);
2276 tcg_temp_free_i64(t1);
2277 }
2278 }
2279
2280 /*
2281 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
2282 *
2283 * Byte pattern: xxxxabcd -> yyyydcba
2284 *
2285 * With TCG_BSWAP_IZ, x == zero, else undefined.
2286 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2287 */
2288 void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
2289 {
2290 /* Only one extension flag may be present. */
2291 tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
2292
2293 if (TCG_TARGET_REG_BITS == 32) {
2294 tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2295 if (flags & TCG_BSWAP_OS) {
2296 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2297 } else {
2298 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2299 }
2300 } else if (TCG_TARGET_HAS_bswap32_i64) {
2301 tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
2302 } else {
2303 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2304 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2305 TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);
2306
2307 /* arg = xxxxabcd */
2308 tcg_gen_shri_i64(t0, arg, 8); /* t0 = .xxxxabc */
2309 tcg_gen_and_i64(t1, arg, t2); /* t1 = .....b.d */
2310 tcg_gen_and_i64(t0, t0, t2); /* t0 = .....a.c */
2311 tcg_gen_shli_i64(t1, t1, 8); /* t1 = ....b.d. */
2312 tcg_gen_or_i64(ret, t0, t1); /* ret = ....badc */
2313
2314 tcg_gen_shli_i64(t1, ret, 48); /* t1 = dc...... */
2315 tcg_gen_shri_i64(t0, ret, 16); /* t0 = ......ba */
2316 if (flags & TCG_BSWAP_OS) {
2317 tcg_gen_sari_i64(t1, t1, 32); /* t1 = ssssdc.. */
2318 } else {
2319 tcg_gen_shri_i64(t1, t1, 32); /* t1 = ....dc.. */
2320 }
2321 tcg_gen_or_i64(ret, t0, t1); /* ret = ssssdcba (OS) */
2322 /* ....dcba (else) */
2323
2324 tcg_temp_free_i64(t0);
2325 tcg_temp_free_i64(t1);
2326 }
2327 }
2328
2329 /*
2330 * bswap64_i64: 64-bit byte swap on a 64-bit value.
2331 *
2332 * Byte pattern: abcdefgh -> hgfedcba
2333 */
2334 void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
2335 {
2336 if (TCG_TARGET_REG_BITS == 32) {
2337 TCGv_i32 t0, t1;
2338 t0 = tcg_temp_ebb_new_i32();
2339 t1 = tcg_temp_ebb_new_i32();
2340
2341 tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
2342 tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
2343 tcg_gen_mov_i32(TCGV_LOW(ret), t1);
2344 tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
2345 tcg_temp_free_i32(t0);
2346 tcg_temp_free_i32(t1);
2347 } else if (TCG_TARGET_HAS_bswap64_i64) {
2348 tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
2349 } else {
2350 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2351 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2352 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
2353
2354 /* arg = abcdefgh */
2355 tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
2356 tcg_gen_shri_i64(t0, arg, 8); /* t0 = .abcdefg */
2357 tcg_gen_and_i64(t1, arg, t2); /* t1 = .b.d.f.h */
2358 tcg_gen_and_i64(t0, t0, t2); /* t0 = .a.c.e.g */
2359 tcg_gen_shli_i64(t1, t1, 8); /* t1 = b.d.f.h. */
2360 tcg_gen_or_i64(ret, t0, t1); /* ret = badcfehg */
2361
2362 tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
2363 tcg_gen_shri_i64(t0, ret, 16); /* t0 = ..badcfe */
2364 tcg_gen_and_i64(t1, ret, t2); /* t1 = ..dc..hg */
2365 tcg_gen_and_i64(t0, t0, t2); /* t0 = ..ba..fe */
2366 tcg_gen_shli_i64(t1, t1, 16); /* t1 = dc..hg.. */
2367 tcg_gen_or_i64(ret, t0, t1); /* ret = dcbahgfe */
2368
2369 tcg_gen_shri_i64(t0, ret, 32); /* t0 = ....dcba */
2370 tcg_gen_shli_i64(t1, ret, 32); /* t1 = hgfe.... */
2371 tcg_gen_or_i64(ret, t0, t1); /* ret = hgfedcba */
2372
2373 tcg_temp_free_i64(t0);
2374 tcg_temp_free_i64(t1);
2375 tcg_temp_free_i64(t2);
2376 }
2377 }
2378
2379 /*
2380 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
2381 * See also include/qemu/bitops.h, hswap64.
2382 *
2383 * Byte pattern: abcdefgh -> ghefcdab
2384 */
2385 void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
2386 {
2387 uint64_t m = 0x0000ffff0000ffffull;
2388 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2389 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2390
2391 /* arg = abcdefgh */
2392 tcg_gen_rotli_i64(t1, arg, 32); /* t1 = efghabcd */
2393 tcg_gen_andi_i64(t0, t1, m); /* t0 = ..gh..cd */
2394 tcg_gen_shli_i64(t0, t0, 16); /* t0 = gh..cd.. */
2395 tcg_gen_shri_i64(t1, t1, 16); /* t1 = ..efghab */
2396 tcg_gen_andi_i64(t1, t1, m); /* t1 = ..ef..ab */
2397 tcg_gen_or_i64(ret, t0, t1); /* ret = ghefcdab */
2398
2399 tcg_temp_free_i64(t0);
2400 tcg_temp_free_i64(t1);
2401 }
2402
2403 /*
2404 * wswap_i64: Swap 32-bit words within a 64-bit value.
2405 *
2406 * Byte pattern: abcdefgh -> efghabcd
2407 */
2408 void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
2409 {
2410 /* Swapping 2 32-bit elements is a rotate. */
2411 tcg_gen_rotli_i64(ret, arg, 32);
2412 }
2413
2414 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
2415 {
2416 if (TCG_TARGET_REG_BITS == 32) {
2417 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2418 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
2419 } else if (TCG_TARGET_HAS_not_i64) {
2420 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
2421 } else {
2422 tcg_gen_xori_i64(ret, arg, -1);
2423 }
2424 }
2425
2426 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2427 {
2428 if (TCG_TARGET_REG_BITS == 32) {
2429 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2430 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2431 } else if (TCG_TARGET_HAS_andc_i64) {
2432 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
2433 } else {
2434 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2435 tcg_gen_not_i64(t0, arg2);
2436 tcg_gen_and_i64(ret, arg1, t0);
2437 tcg_temp_free_i64(t0);
2438 }
2439 }
2440
2441 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2442 {
2443 if (TCG_TARGET_REG_BITS == 32) {
2444 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2445 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2446 } else if (TCG_TARGET_HAS_eqv_i64) {
2447 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
2448 } else {
2449 tcg_gen_xor_i64(ret, arg1, arg2);
2450 tcg_gen_not_i64(ret, ret);
2451 }
2452 }
2453
2454 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2455 {
2456 if (TCG_TARGET_REG_BITS == 32) {
2457 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2458 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2459 } else if (TCG_TARGET_HAS_nand_i64) {
2460 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2461 } else {
2462 tcg_gen_and_i64(ret, arg1, arg2);
2463 tcg_gen_not_i64(ret, ret);
2464 }
2465 }
2466
2467 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2468 {
2469 if (TCG_TARGET_REG_BITS == 32) {
2470 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2471 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2472 } else if (TCG_TARGET_HAS_nor_i64) {
2473 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2474 } else {
2475 tcg_gen_or_i64(ret, arg1, arg2);
2476 tcg_gen_not_i64(ret, ret);
2477 }
2478 }
2479
2480 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2481 {
2482 if (TCG_TARGET_REG_BITS == 32) {
2483 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2484 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2485 } else if (TCG_TARGET_HAS_orc_i64) {
2486 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2487 } else {
2488 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2489 tcg_gen_not_i64(t0, arg2);
2490 tcg_gen_or_i64(ret, arg1, t0);
2491 tcg_temp_free_i64(t0);
2492 }
2493 }
2494
2495 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2496 {
2497 if (TCG_TARGET_HAS_clz_i64) {
2498 tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2499 } else {
2500 gen_helper_clz_i64(ret, arg1, arg2);
2501 }
2502 }
2503
2504 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
2505 {
2506 if (TCG_TARGET_REG_BITS == 32
2507 && TCG_TARGET_HAS_clz_i32
2508 && arg2 <= 0xffffffffu) {
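/* Build the fallback for a zero high word: clzi yields arg2 - 32
   when the low word is also zero, so after adding 32, t is either
   32 + clz(low) or arg2.  The final clz op returns t only when the
   high word is zero. */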
2509 TCGv_i32 t = tcg_temp_ebb_new_i32();
2510 tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
2511 tcg_gen_addi_i32(t, t, 32);
2512 tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
2513 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2514 tcg_temp_free_i32(t);
2515 } else {
2516 tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
2517 }
2518 }
2519
2520 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2521 {
2522 if (TCG_TARGET_HAS_ctz_i64) {
2523 tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
2524 } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
2525 TCGv_i64 z, t = tcg_temp_ebb_new_i64();
2526
2527 if (TCG_TARGET_HAS_ctpop_i64) {
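/* (arg1 - 1) & ~arg1 sets exactly the bits below the least
   significant set bit, so its population count equals ctz(arg1),
   including the all-ones (count 64) result for a zero input. */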
2528 tcg_gen_subi_i64(t, arg1, 1);
2529 tcg_gen_andc_i64(t, t, arg1);
2530 tcg_gen_ctpop_i64(t, t);
2531 } else {
2532 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
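/* neg & arg isolates the lowest set bit; its clz is 63 - ctz, so
   xor with 63 converts.  A zero input gives 64 ^ 63 = 127 here,
   which the movcond below replaces with arg2. */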
2533 tcg_gen_neg_i64(t, arg1);
2534 tcg_gen_and_i64(t, t, arg1);
2535 tcg_gen_clzi_i64(t, t, 64);
2536 tcg_gen_xori_i64(t, t, 63);
2537 }
2538 z = tcg_constant_i64(0);
2539 tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
2540 tcg_temp_free_i64(t);
2541 tcg_temp_free_i64(z);
2542 } else {
2543 gen_helper_ctz_i64(ret, arg1, arg2);
2544 }
2545 }
2546
2547 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
2548 {
2549 if (TCG_TARGET_REG_BITS == 32
2550 && TCG_TARGET_HAS_ctz_i32
2551 && arg2 <= 0xffffffffu) {
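/* Build the fallback for a zero low word: 32 + ctz(high), or arg2
   when the high word is also zero (ctzi yields arg2 - 32 there).
   The final ctz op returns it only when the low word is zero. */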
2552 TCGv_i32 t32 = tcg_temp_ebb_new_i32();
2553 tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
2554 tcg_gen_addi_i32(t32, t32, 32);
2555 tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
2556 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2557 tcg_temp_free_i32(t32);
2558 } else if (!TCG_TARGET_HAS_ctz_i64
2559 && TCG_TARGET_HAS_ctpop_i64
2560 && arg2 == 64) {
2561 /* This equivalence has the advantage of not requiring a fixup. */
2562 TCGv_i64 t = tcg_temp_ebb_new_i64();
2563 tcg_gen_subi_i64(t, arg1, 1);
2564 tcg_gen_andc_i64(t, t, arg1);
2565 tcg_gen_ctpop_i64(ret, t);
2566 tcg_temp_free_i64(t);
2567 } else {
2568 tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
2569 }
2570 }
2571
2572 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
2573 {
2574 if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
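/* Xor with the replicated sign clears every redundant copy of the
   sign bit, so clz counts the sign bit plus the redundant bits;
   subtract one to exclude the sign bit itself. */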
2575 TCGv_i64 t = tcg_temp_ebb_new_i64();
2576 tcg_gen_sari_i64(t, arg, 63);
2577 tcg_gen_xor_i64(t, t, arg);
2578 tcg_gen_clzi_i64(t, t, 64);
2579 tcg_gen_subi_i64(ret, t, 1);
2580 tcg_temp_free_i64(t);
2581 } else {
2582 gen_helper_clrsb_i64(ret, arg);
2583 }
2584 }
2585
2586 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
2587 {
2588 if (TCG_TARGET_HAS_ctpop_i64) {
2589 tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
2590 } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
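/* ctpop(arg1) = ctpop(high) + ctpop(low); the sum fits in the low
   word and the high word is cleared. */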
2591 tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2592 tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2593 tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
2594 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2595 } else {
2596 gen_helper_ctpop_i64(ret, arg1);
2597 }
2598 }
2599
2600 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2601 {
2602 if (TCG_TARGET_HAS_rot_i64) {
2603 tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2604 } else {
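/* No rotate support: combine a left shift with a right shift by
   the complementary count, 64 - arg2. */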
2605 TCGv_i64 t0, t1;
2606 t0 = tcg_temp_ebb_new_i64();
2607 t1 = tcg_temp_ebb_new_i64();
2608 tcg_gen_shl_i64(t0, arg1, arg2);
2609 tcg_gen_subfi_i64(t1, 64, arg2);
2610 tcg_gen_shr_i64(t1, arg1, t1);
2611 tcg_gen_or_i64(ret, t0, t1);
2612 tcg_temp_free_i64(t0);
2613 tcg_temp_free_i64(t1);
2614 }
2615 }
2616
2617 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2618 {
2619 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2620 /* Some cases can be optimized here. */
2621 if (arg2 == 0) {
2622 tcg_gen_mov_i64(ret, arg1);
2623 } else if (TCG_TARGET_HAS_rot_i64) {
2624 tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2625 } else {
2626 TCGv_i64 t0, t1;
2627 t0 = tcg_temp_ebb_new_i64();
2628 t1 = tcg_temp_ebb_new_i64();
2629 tcg_gen_shli_i64(t0, arg1, arg2);
2630 tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2631 tcg_gen_or_i64(ret, t0, t1);
2632 tcg_temp_free_i64(t0);
2633 tcg_temp_free_i64(t1);
2634 }
2635 }
2636
2637 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2638 {
2639 if (TCG_TARGET_HAS_rot_i64) {
2640 tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2641 } else {
2642 TCGv_i64 t0, t1;
2643 t0 = tcg_temp_ebb_new_i64();
2644 t1 = tcg_temp_ebb_new_i64();
2645 tcg_gen_shr_i64(t0, arg1, arg2);
2646 tcg_gen_subfi_i64(t1, 64, arg2);
2647 tcg_gen_shl_i64(t1, arg1, t1);
2648 tcg_gen_or_i64(ret, t0, t1);
2649 tcg_temp_free_i64(t0);
2650 tcg_temp_free_i64(t1);
2651 }
2652 }
2653
2654 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2655 {
2656 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2657 /* Some cases can be optimized here. */
2658 if (arg2 == 0) {
2659 tcg_gen_mov_i64(ret, arg1);
2660 } else {
2661 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2662 }
2663 }
2664
2665 void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
2666 unsigned int ofs, unsigned int len)
2667 {
2668 uint64_t mask;
2669 TCGv_i64 t1;
2670
2671 tcg_debug_assert(ofs < 64);
2672 tcg_debug_assert(len > 0);
2673 tcg_debug_assert(len <= 64);
2674 tcg_debug_assert(ofs + len <= 64);
2675
2676 if (len == 64) {
2677 tcg_gen_mov_i64(ret, arg2);
2678 return;
2679 }
2680 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2681 tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2682 return;
2683 }
2684
2685 if (TCG_TARGET_REG_BITS == 32) {
2686 if (ofs >= 32) {
2687 tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2688 TCGV_LOW(arg2), ofs - 32, len);
2689 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2690 return;
2691 }
2692 if (ofs + len <= 32) {
2693 tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2694 TCGV_LOW(arg2), ofs, len);
2695 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2696 return;
2697 }
2698 }
2699
2700 t1 = tcg_temp_ebb_new_i64();
2701
2702 if (TCG_TARGET_HAS_extract2_i64) {
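/* extract2 is a double-word funnel shift, which lets a deposit
   that reaches bit 63 (ofs + len == 64) or starts at bit 0 be
   done in two operations instead of mask-shift-mask-or. */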
2703 if (ofs + len == 64) {
2704 tcg_gen_shli_i64(t1, arg1, len);
2705 tcg_gen_extract2_i64(ret, t1, arg2, len);
2706 goto done;
2707 }
2708 if (ofs == 0) {
2709 tcg_gen_extract2_i64(ret, arg1, arg2, len);
2710 tcg_gen_rotli_i64(ret, ret, len);
2711 goto done;
2712 }
2713 }
2714
2715 mask = (1ull << len) - 1;
2716 if (ofs + len < 64) {
2717 tcg_gen_andi_i64(t1, arg2, mask);
2718 tcg_gen_shli_i64(t1, t1, ofs);
2719 } else {
2720 tcg_gen_shli_i64(t1, arg2, ofs);
2721 }
2722 tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2723 tcg_gen_or_i64(ret, ret, t1);
2724 done:
2725 tcg_temp_free_i64(t1);
2726 }
2727
2728 void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
2729 unsigned int ofs, unsigned int len)
2730 {
2731 tcg_debug_assert(ofs < 64);
2732 tcg_debug_assert(len > 0);
2733 tcg_debug_assert(len <= 64);
2734 tcg_debug_assert(ofs + len <= 64);
2735
2736 if (ofs + len == 64) {
2737 tcg_gen_shli_i64(ret, arg, ofs);
2738 } else if (ofs == 0) {
2739 tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2740 } else if (TCG_TARGET_HAS_deposit_i64
2741 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2742 TCGv_i64 zero = tcg_constant_i64(0);
2743 tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
2744 } else {
2745 if (TCG_TARGET_REG_BITS == 32) {
2746 if (ofs >= 32) {
2747 tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
2748 ofs - 32, len);
2749 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
2750 return;
2751 }
2752 if (ofs + len <= 32) {
2753 tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2754 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2755 return;
2756 }
2757 }
2758 /* To help two-operand hosts we prefer to zero-extend first,
2759 which allows ARG to stay live. */
2760 switch (len) {
2761 case 32:
2762 if (TCG_TARGET_HAS_ext32u_i64) {
2763 tcg_gen_ext32u_i64(ret, arg);
2764 tcg_gen_shli_i64(ret, ret, ofs);
2765 return;
2766 }
2767 break;
2768 case 16:
2769 if (TCG_TARGET_HAS_ext16u_i64) {
2770 tcg_gen_ext16u_i64(ret, arg);
2771 tcg_gen_shli_i64(ret, ret, ofs);
2772 return;
2773 }
2774 break;
2775 case 8:
2776 if (TCG_TARGET_HAS_ext8u_i64) {
2777 tcg_gen_ext8u_i64(ret, arg);
2778 tcg_gen_shli_i64(ret, ret, ofs);
2779 return;
2780 }
2781 break;
2782 }
2783 /* Otherwise prefer zero-extension over AND for code size. */
2784 switch (ofs + len) {
2785 case 32:
2786 if (TCG_TARGET_HAS_ext32u_i64) {
2787 tcg_gen_shli_i64(ret, arg, ofs);
2788 tcg_gen_ext32u_i64(ret, ret);
2789 return;
2790 }
2791 break;
2792 case 16:
2793 if (TCG_TARGET_HAS_ext16u_i64) {
2794 tcg_gen_shli_i64(ret, arg, ofs);
2795 tcg_gen_ext16u_i64(ret, ret);
2796 return;
2797 }
2798 break;
2799 case 8:
2800 if (TCG_TARGET_HAS_ext8u_i64) {
2801 tcg_gen_shli_i64(ret, arg, ofs);
2802 tcg_gen_ext8u_i64(ret, ret);
2803 return;
2804 }
2805 break;
2806 }
2807 tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2808 tcg_gen_shli_i64(ret, ret, ofs);
2809 }
2810 }
2811
2812 void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
2813 unsigned int ofs, unsigned int len)
2814 {
2815 tcg_debug_assert(ofs < 64);
2816 tcg_debug_assert(len > 0);
2817 tcg_debug_assert(len <= 64);
2818 tcg_debug_assert(ofs + len <= 64);
2819
2820 /* Canonicalize certain special cases, even if extract is supported. */
2821 if (ofs + len == 64) {
2822 tcg_gen_shri_i64(ret, arg, 64 - len);
2823 return;
2824 }
2825 if (ofs == 0) {
2826 tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2827 return;
2828 }
2829
2830 if (TCG_TARGET_REG_BITS == 32) {
2831 /* Look for a 32-bit extract within one of the two words. */
2832 if (ofs >= 32) {
2833 tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2834 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2835 return;
2836 }
2837 if (ofs + len <= 32) {
2838 tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2839 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2840 return;
2841 }
2842 /* The field is split across two words. One double-word
2843 shift is better than two double-word shifts. */
2844 goto do_shift_and;
2845 }
2846
2847 if (TCG_TARGET_HAS_extract_i64
2848 && TCG_TARGET_extract_i64_valid(ofs, len)) {
2849 tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
2850 return;
2851 }
2852
2853 /* Assume that zero-extension, if available, is cheaper than a shift. */
2854 switch (ofs + len) {
2855 case 32:
2856 if (TCG_TARGET_HAS_ext32u_i64) {
2857 tcg_gen_ext32u_i64(ret, arg);
2858 tcg_gen_shri_i64(ret, ret, ofs);
2859 return;
2860 }
2861 break;
2862 case 16:
2863 if (TCG_TARGET_HAS_ext16u_i64) {
2864 tcg_gen_ext16u_i64(ret, arg);
2865 tcg_gen_shri_i64(ret, ret, ofs);
2866 return;
2867 }
2868 break;
2869 case 8:
2870 if (TCG_TARGET_HAS_ext8u_i64) {
2871 tcg_gen_ext8u_i64(ret, arg);
2872 tcg_gen_shri_i64(ret, ret, ofs);
2873 return;
2874 }
2875 break;
2876 }
2877
2878 /* ??? Ideally we'd know what values are available for immediate AND.
2879 Assume that 8 bits are available, plus the special cases of 16 and 32,
2880 so that we get ext8u, ext16u, and ext32u. */
2881 switch (len) {
2882 case 1 ... 8: case 16: case 32:
2883 do_shift_and:
2884 tcg_gen_shri_i64(ret, arg, ofs);
2885 tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
2886 break;
2887 default:
2888 tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2889 tcg_gen_shri_i64(ret, ret, 64 - len);
2890 break;
2891 }
2892 }
2893
2894 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2895 unsigned int ofs, unsigned int len)
2896 {
2897 tcg_debug_assert(ofs < 64);
2898 tcg_debug_assert(len > 0);
2899 tcg_debug_assert(len <= 64);
2900 tcg_debug_assert(ofs + len <= 64);
2901
2902 /* Canonicalize certain special cases, even if sextract is supported. */
2903 if (ofs + len == 64) {
2904 tcg_gen_sari_i64(ret, arg, 64 - len);
2905 return;
2906 }
2907 if (ofs == 0) {
2908 switch (len) {
2909 case 32:
2910 tcg_gen_ext32s_i64(ret, arg);
2911 return;
2912 case 16:
2913 tcg_gen_ext16s_i64(ret, arg);
2914 return;
2915 case 8:
2916 tcg_gen_ext8s_i64(ret, arg);
2917 return;
2918 }
2919 }
2920
2921 if (TCG_TARGET_REG_BITS == 32) {
2922 /* Look for a 32-bit extract within one of the two words. */
2923 if (ofs >= 32) {
2924 tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2925 } else if (ofs + len <= 32) {
2926 tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2927 } else if (ofs == 0) {
2928 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2929 tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2930 return;
2931 } else if (len > 32) {
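/* Here 0 < ofs < 32 < ofs + len: the field straddles the word
   boundary, and bits 32..len-1 of the field sit at bit ofs of
   the high word. */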
2932 TCGv_i32 t = tcg_temp_ebb_new_i32();
2933 /* Extract the bits for the high word normally. */
2934 tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
2935 /* Shift the field down for the low part. */
2936 tcg_gen_shri_i64(ret, arg, ofs);
2937 /* Overwrite the shift into the high part. */
2938 tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2939 tcg_temp_free_i32(t);
2940 return;
2941 } else {
2942 /* Shift the field down for the low part, such that the
2943 field sits at the MSB. */
2944 tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2945 /* Shift the field down from the MSB, sign extending. */
2946 tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2947 }
2948 /* Sign-extend the field from 32 bits. */
2949 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2950 return;
2951 }
2952
2953 if (TCG_TARGET_HAS_sextract_i64
2954 && TCG_TARGET_extract_i64_valid(ofs, len)) {
2955 tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2956 return;
2957 }
2958
2959 /* Assume that sign-extension, if available, is cheaper than a shift. */
2960 switch (ofs + len) {
2961 case 32:
2962 if (TCG_TARGET_HAS_ext32s_i64) {
2963 tcg_gen_ext32s_i64(ret, arg);
2964 tcg_gen_sari_i64(ret, ret, ofs);
2965 return;
2966 }
2967 break;
2968 case 16:
2969 if (TCG_TARGET_HAS_ext16s_i64) {
2970 tcg_gen_ext16s_i64(ret, arg);
2971 tcg_gen_sari_i64(ret, ret, ofs);
2972 return;
2973 }
2974 break;
2975 case 8:
2976 if (TCG_TARGET_HAS_ext8s_i64) {
2977 tcg_gen_ext8s_i64(ret, arg);
2978 tcg_gen_sari_i64(ret, ret, ofs);
2979 return;
2980 }
2981 break;
2982 }
2983 switch (len) {
2984 case 32:
2985 if (TCG_TARGET_HAS_ext32s_i64) {
2986 tcg_gen_shri_i64(ret, arg, ofs);
2987 tcg_gen_ext32s_i64(ret, ret);
2988 return;
2989 }
2990 break;
2991 case 16:
2992 if (TCG_TARGET_HAS_ext16s_i64) {
2993 tcg_gen_shri_i64(ret, arg, ofs);
2994 tcg_gen_ext16s_i64(ret, ret);
2995 return;
2996 }
2997 break;
2998 case 8:
2999 if (TCG_TARGET_HAS_ext8s_i64) {
3000 tcg_gen_shri_i64(ret, arg, ofs);
3001 tcg_gen_ext8s_i64(ret, ret);
3002 return;
3003 }
3004 break;
3005 }
3006 tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
3007 tcg_gen_sari_i64(ret, ret, 64 - len);
3008 }
3009
3010 /*
3011 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
3012 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
3013 */
3014 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
3015 unsigned int ofs)
3016 {
3017 tcg_debug_assert(ofs <= 64);
3018 if (ofs == 0) {
3019 tcg_gen_mov_i64(ret, al);
3020 } else if (ofs == 64) {
3021 tcg_gen_mov_i64(ret, ah);
3022 } else if (al == ah) {
3023 tcg_gen_rotri_i64(ret, al, ofs);
3024 } else if (TCG_TARGET_HAS_extract2_i64) {
3025 tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
3026 } else {
3027 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3028 tcg_gen_shri_i64(t0, al, ofs);
3029 tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
3030 tcg_temp_free_i64(t0);
3031 }
3032 }
3033
3034 void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
3035 TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
3036 {
3037 if (cond == TCG_COND_ALWAYS) {
3038 tcg_gen_mov_i64(ret, v1);
3039 } else if (cond == TCG_COND_NEVER) {
3040 tcg_gen_mov_i64(ret, v2);
3041 } else if (TCG_TARGET_REG_BITS == 32) {
3042 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
3043 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
3044 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
3045 TCGV_LOW(c1), TCGV_HIGH(c1),
3046 TCGV_LOW(c2), TCGV_HIGH(c2), cond);
3047
3048 if (TCG_TARGET_HAS_movcond_i32) {
3049 tcg_gen_movi_i32(t1, 0);
3050 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
3051 TCGV_LOW(v1), TCGV_LOW(v2));
3052 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
3053 TCGV_HIGH(v1), TCGV_HIGH(v2));
3054 } else {
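/* Turn the 0/1 setcond2 result into a 0/-1 mask and select each
   half bitwise: ret = (v1 & mask) | (v2 & ~mask). */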
3055 tcg_gen_neg_i32(t0, t0);
3056
3057 tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
3058 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
3059 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
3060
3061 tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
3062 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
3063 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
3064 }
3065 tcg_temp_free_i32(t0);
3066 tcg_temp_free_i32(t1);
3067 } else if (TCG_TARGET_HAS_movcond_i64) {
3068 tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
3069 } else {
3070 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3071 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
3072 tcg_gen_negsetcond_i64(cond, t0, c1, c2);
3073 tcg_gen_and_i64(t1, v1, t0);
3074 tcg_gen_andc_i64(ret, v2, t0);
3075 tcg_gen_or_i64(ret, ret, t1);
3076 tcg_temp_free_i64(t0);
3077 tcg_temp_free_i64(t1);
3078 }
3079 }
3080
3081 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
3082 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
3083 {
3084 if (TCG_TARGET_HAS_add2_i64) {
3085 tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
3086 } else {
3087 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3088 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
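/* The carry out of the low-part addition is an unsigned overflow
   test: the sum is below either addend iff the add wrapped. */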
3089 tcg_gen_add_i64(t0, al, bl);
3090 tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
3091 tcg_gen_add_i64(rh, ah, bh);
3092 tcg_gen_add_i64(rh, rh, t1);
3093 tcg_gen_mov_i64(rl, t0);
3094 tcg_temp_free_i64(t0);
3095 tcg_temp_free_i64(t1);
3096 }
3097 }
3098
3099 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
3100 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
3101 {
3102 if (TCG_TARGET_HAS_sub2_i64) {
3103 tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
3104 } else {
3105 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3106 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
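/* The borrow out of the low-part subtraction is simply
   al < bl, unsigned. */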
3107 tcg_gen_sub_i64(t0, al, bl);
3108 tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
3109 tcg_gen_sub_i64(rh, ah, bh);
3110 tcg_gen_sub_i64(rh, rh, t1);
3111 tcg_gen_mov_i64(rl, t0);
3112 tcg_temp_free_i64(t0);
3113 tcg_temp_free_i64(t1);
3114 }
3115 }
3116
3117 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3118 {
3119 if (TCG_TARGET_HAS_mulu2_i64) {
3120 tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
3121 } else if (TCG_TARGET_HAS_muluh_i64) {
3122 TCGv_i64 t = tcg_temp_ebb_new_i64();
3123 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
3124 tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
3125 tcg_gen_mov_i64(rl, t);
3126 tcg_temp_free_i64(t);
3127 } else {
3128 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3129 tcg_gen_mul_i64(t0, arg1, arg2);
3130 gen_helper_muluh_i64(rh, arg1, arg2);
3131 tcg_gen_mov_i64(rl, t0);
3132 tcg_temp_free_i64(t0);
3133 }
3134 }
3135
3136 void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3137 {
3138 if (TCG_TARGET_HAS_muls2_i64) {
3139 tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
3140 } else if (TCG_TARGET_HAS_mulsh_i64) {
3141 TCGv_i64 t = tcg_temp_ebb_new_i64();
3142 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
3143 tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
3144 tcg_gen_mov_i64(rl, t);
3145 tcg_temp_free_i64(t);
3146 } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
3147 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3148 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
3149 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
3150 TCGv_i64 t3 = tcg_temp_ebb_new_i64();
3151 tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
3152 /* Adjust for negative inputs. */
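/* As a signed value, x equals its unsigned value minus 2^64 when
   its sign bit is set, so the signed high word is the unsigned
   high word minus arg2 when arg1 < 0 and minus arg1 when
   arg2 < 0; the sari/and pairs form those conditional terms. */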
3153 tcg_gen_sari_i64(t2, arg1, 63);
3154 tcg_gen_sari_i64(t3, arg2, 63);
3155 tcg_gen_and_i64(t2, t2, arg2);
3156 tcg_gen_and_i64(t3, t3, arg1);
3157 tcg_gen_sub_i64(rh, t1, t2);
3158 tcg_gen_sub_i64(rh, rh, t3);
3159 tcg_gen_mov_i64(rl, t0);
3160 tcg_temp_free_i64(t0);
3161 tcg_temp_free_i64(t1);
3162 tcg_temp_free_i64(t2);
3163 tcg_temp_free_i64(t3);
3164 } else {
3165 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3166 tcg_gen_mul_i64(t0, arg1, arg2);
3167 gen_helper_mulsh_i64(rh, arg1, arg2);
3168 tcg_gen_mov_i64(rl, t0);
3169 tcg_temp_free_i64(t0);
3170 }
3171 }
3172
3173 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3174 {
3175 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3176 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
3177 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
3178 tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
3179 /* Adjust for negative input for the signed arg1. */
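/* A negative arg1 is arg1_unsigned - 2^64, so subtract arg2 from
   the unsigned high word in that case. */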
3180 tcg_gen_sari_i64(t2, arg1, 63);
3181 tcg_gen_and_i64(t2, t2, arg2);
3182 tcg_gen_sub_i64(rh, t1, t2);
3183 tcg_gen_mov_i64(rl, t0);
3184 tcg_temp_free_i64(t0);
3185 tcg_temp_free_i64(t1);
3186 tcg_temp_free_i64(t2);
3187 }
3188
3189 void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
3190 {
3191 tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
3192 }
3193
3194 void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
3195 {
3196 tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
3197 }
3198
3199 void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
3200 {
3201 tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
3202 }
3203
3204 void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
3205 {
3206 tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
3207 }
3208
3209 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
3210 {
3211 TCGv_i64 t = tcg_temp_ebb_new_i64();
3212
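/* t is 0 when a is non-negative and -1 otherwise, so xor and
   subtract compute either a or ~a + 1 = -a: the usual branchless
   absolute value. */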
3213 tcg_gen_sari_i64(t, a, 63);
3214 tcg_gen_xor_i64(ret, a, t);
3215 tcg_gen_sub_i64(ret, ret, t);
3216 tcg_temp_free_i64(t);
3217 }
3218
3219 /* Size changing operations. */
3220
3221 void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
3222 {
3223 if (TCG_TARGET_REG_BITS == 32) {
3224 tcg_gen_mov_i32(ret, TCGV_LOW(arg));
3225 } else if (TCG_TARGET_HAS_extr_i64_i32) {
3226 tcg_gen_op2(INDEX_op_extrl_i64_i32,
3227 tcgv_i32_arg(ret), tcgv_i64_arg(arg));
3228 } else {
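/* Without a dedicated extract-low op, reinterpret the i64 temp as
   an i32 and let a 32-bit mov copy its low half. */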
3229 tcg_gen_mov_i32(ret, (TCGv_i32)arg);
3230 }
3231 }
3232
3233 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
3234 {
3235 if (TCG_TARGET_REG_BITS == 32) {
3236 tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
3237 } else if (TCG_TARGET_HAS_extr_i64_i32) {
3238 tcg_gen_op2(INDEX_op_extrh_i64_i32,
3239 tcgv_i32_arg(ret), tcgv_i64_arg(arg));
3240 } else {
3241 TCGv_i64 t = tcg_temp_ebb_new_i64();
3242 tcg_gen_shri_i64(t, arg, 32);
3243 tcg_gen_mov_i32(ret, (TCGv_i32)t);
3244 tcg_temp_free_i64(t);
3245 }
3246 }
3247
3248 void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
3249 {
3250 if (TCG_TARGET_REG_BITS == 32) {
3251 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
3252 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
3253 } else {
3254 tcg_gen_op2(INDEX_op_extu_i32_i64,
3255 tcgv_i64_arg(ret), tcgv_i32_arg(arg));
3256 }
3257 }
3258
3259 void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
3260 {
3261 if (TCG_TARGET_REG_BITS == 32) {
3262 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
3263 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
3264 } else {
3265 tcg_gen_op2(INDEX_op_ext_i32_i64,
3266 tcgv_i64_arg(ret), tcgv_i32_arg(arg));
3267 }
3268 }
3269
3270 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
3271 {
3272 TCGv_i64 tmp;
3273
3274 if (TCG_TARGET_REG_BITS == 32) {
3275 tcg_gen_mov_i32(TCGV_LOW(dest), low);
3276 tcg_gen_mov_i32(TCGV_HIGH(dest), high);
3277 return;
3278 }
3279
3280 tmp = tcg_temp_ebb_new_i64();
3281 /* These extensions are only needed for type correctness.
3282 We may be able to do better given target specific information. */
3283 tcg_gen_extu_i32_i64(tmp, high);
3284 tcg_gen_extu_i32_i64(dest, low);
3285 /* If deposit is available, use it. Otherwise use the extra
3286 knowledge that we have of the zero-extensions above. */
3287 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
3288 tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
3289 } else {
3290 tcg_gen_shli_i64(tmp, tmp, 32);
3291 tcg_gen_or_i64(dest, dest, tmp);
3292 }
3293 tcg_temp_free_i64(tmp);
3294 }
3295
3296 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
3297 {
3298 if (TCG_TARGET_REG_BITS == 32) {
3299 tcg_gen_mov_i32(lo, TCGV_LOW(arg));
3300 tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
3301 } else {
3302 tcg_gen_extrl_i64_i32(lo, arg);
3303 tcg_gen_extrh_i64_i32(hi, arg);
3304 }
3305 }
3306
3307 void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
3308 {
3309 tcg_gen_ext32u_i64(lo, arg);
3310 tcg_gen_shri_i64(hi, arg, 32);
3311 }
3312
3313 void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
3314 {
3315 tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
3316 }
3317
3318 void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
3319 {
3320 tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
3321 tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
3322 }
3323
3324 void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
3325 {
3326 tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
3327 tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
3328 }
3329
3330 void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
3331 {
3332 if (dst != src) {
3333 tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
3334 tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
3335 }
3336 }
3337
3338 void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
3339 {
3340 if (HOST_BIG_ENDIAN) {
3341 tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
3342 tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
3343 } else {
3344 tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
3345 tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
3346 }
3347 }
3348
3349 void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
3350 {
3351 if (HOST_BIG_ENDIAN) {
3352 tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
3353 tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
3354 } else {
3355 tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
3356 tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
3357 }
3358 }
3359
3360 /* QEMU specific operations. */
3361
3362 void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
3363 {
3364 /*
3365 * Let the jit code return the read-only version of the
3366 * TranslationBlock, so that we minimize the pc-relative
3367 * distance of the address of the exit_tb code to TB.
3368 * This will improve utilization of pc-relative address loads.
3369 *
3370 * TODO: Move this to translator_loop, so that all const
3371 * TranslationBlock pointers refer to read-only memory.
3372 * This requires coordination with targets that do not use
3373 * the translator_loop.
3374 */
3375 uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;
3376
3377 if (tb == NULL) {
3378 tcg_debug_assert(idx == 0);
3379 } else if (idx <= TB_EXIT_IDXMAX) {
3380 #ifdef CONFIG_DEBUG_TCG
3381 /* This is an exit following a goto_tb. Verify that we have
3382 seen this numbered exit before, via tcg_gen_goto_tb. */
3383 tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
3384 #endif
3385 } else {
3386 /* This is an exit via the exitreq label. */
3387 tcg_debug_assert(idx == TB_EXIT_REQUESTED);
3388 }
3389
3390 tcg_gen_op1i(INDEX_op_exit_tb, val);
3391 }
3392
3393 void tcg_gen_goto_tb(unsigned idx)
3394 {
3395 /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
3396 tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
3397 /* We only support two chained exits. */
3398 tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
3399 #ifdef CONFIG_DEBUG_TCG
3400 /* Verify that we haven't seen this numbered exit before. */
3401 tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
3402 tcg_ctx->goto_tb_issue_mask |= 1 << idx;
3403 #endif
3404 plugin_gen_disable_mem_helpers();
3405 tcg_gen_op1i(INDEX_op_goto_tb, idx);
3406 }
3407
3408 void tcg_gen_lookup_and_goto_ptr(void)
3409 {
3410 TCGv_ptr ptr;
3411
3412 if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
3413 tcg_gen_exit_tb(NULL, 0);
3414 return;
3415 }
3416
3417 plugin_gen_disable_mem_helpers();
3418 ptr = tcg_temp_ebb_new_ptr();
3419 gen_helper_lookup_tb_ptr(ptr, tcg_env);
3420 tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
3421 tcg_temp_free_ptr(ptr);
3422 }