]> git.proxmox.com Git - mirror_qemu.git/blame - tcg/tcg-op.c
plugins: Use emit_before_op for PLUGIN_GEN_FROM_MEM
[mirror_qemu.git] / tcg / tcg-op.c
CommitLineData
951c6300
RH
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
757e725b 25#include "qemu/osdep.h"
dcb32f1d 26#include "tcg/tcg.h"
47f7313d 27#include "tcg/tcg-temp-internal.h"
ad3d0e4d 28#include "tcg/tcg-op-common.h"
cac9b0fd 29#include "exec/translation-block.h"
e6d86bed 30#include "exec/plugin-gen.h"
d56fea79 31#include "tcg-internal.h"
951c6300
RH
32
33
ecfa1877
RH
34/*
35 * Encourage the compiler to tail-call to a function, rather than inlining.
36 * Minimizes code size across 99 bottles of beer on the wall.
37 */
38#define NI __attribute__((noinline))
39
40void NI tcg_gen_op1(TCGOpcode opc, TCGArg a1)
951c6300 41{
d4478943 42 TCGOp *op = tcg_emit_op(opc, 1);
75e8b9b7 43 op->args[0] = a1;
951c6300
RH
44}
45
ecfa1877 46void NI tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
951c6300 47{
d4478943 48 TCGOp *op = tcg_emit_op(opc, 2);
75e8b9b7
RH
49 op->args[0] = a1;
50 op->args[1] = a2;
951c6300
RH
51}
52
ecfa1877 53void NI tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
951c6300 54{
d4478943 55 TCGOp *op = tcg_emit_op(opc, 3);
75e8b9b7
RH
56 op->args[0] = a1;
57 op->args[1] = a2;
58 op->args[2] = a3;
951c6300
RH
59}
60
ecfa1877 61void NI tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
951c6300 62{
d4478943 63 TCGOp *op = tcg_emit_op(opc, 4);
75e8b9b7
RH
64 op->args[0] = a1;
65 op->args[1] = a2;
66 op->args[2] = a3;
67 op->args[3] = a4;
951c6300
RH
68}
69
ecfa1877
RH
70void NI tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
71 TCGArg a4, TCGArg a5)
951c6300 72{
d4478943 73 TCGOp *op = tcg_emit_op(opc, 5);
75e8b9b7
RH
74 op->args[0] = a1;
75 op->args[1] = a2;
76 op->args[2] = a3;
77 op->args[3] = a4;
78 op->args[4] = a5;
951c6300
RH
79}
80
ecfa1877
RH
81void NI tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
82 TCGArg a4, TCGArg a5, TCGArg a6)
951c6300 83{
d4478943 84 TCGOp *op = tcg_emit_op(opc, 6);
75e8b9b7
RH
85 op->args[0] = a1;
86 op->args[1] = a2;
87 op->args[2] = a3;
88 op->args[3] = a4;
89 op->args[4] = a5;
90 op->args[5] = a6;
951c6300
RH
91}
92
6fc75d50
RH
93/*
94 * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
95 * assertion check. Force tail calls to avoid too much code expansion.
96 */
97#ifdef CONFIG_DEBUG_TCG
98# define DNI NI
99#else
100# define DNI
101#endif
102
17b9fadb 103static void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 a1)
6fc75d50
RH
104{
105 tcg_gen_op1(opc, tcgv_i32_arg(a1));
106}
107
17b9fadb 108static void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 a1)
6fc75d50
RH
109{
110 tcg_gen_op1(opc, tcgv_i64_arg(a1));
111}
112
17b9fadb 113static void DNI tcg_gen_op1i(TCGOpcode opc, TCGArg a1)
6fc75d50
RH
114{
115 tcg_gen_op1(opc, a1);
116}
117
17b9fadb 118static void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
6fc75d50
RH
119{
120 tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
121}
122
17b9fadb 123static void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
6fc75d50
RH
124{
125 tcg_gen_op2(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
126}
127
17b9fadb
RH
128static void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1,
129 TCGv_i32 a2, TCGv_i32 a3)
6fc75d50
RH
130{
131 tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), tcgv_i32_arg(a3));
132}
133
17b9fadb
RH
134static void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1,
135 TCGv_i64 a2, TCGv_i64 a3)
6fc75d50
RH
136{
137 tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), tcgv_i64_arg(a3));
138}
139
17b9fadb
RH
140static void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1,
141 TCGv_i32 a2, TCGArg a3)
6fc75d50
RH
142{
143 tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
144}
145
17b9fadb
RH
146static void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1,
147 TCGv_i64 a2, TCGArg a3)
6fc75d50
RH
148{
149 tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
150}
151
17b9fadb
RH
152static void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
153 TCGv_ptr base, TCGArg offset)
6fc75d50
RH
154{
155 tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_ptr_arg(base), offset);
156}
157
17b9fadb
RH
158static void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
159 TCGv_ptr base, TCGArg offset)
6fc75d50
RH
160{
161 tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_ptr_arg(base), offset);
162}
163
17b9fadb
RH
164static void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
165 TCGv_i32 a3, TCGv_i32 a4)
6fc75d50
RH
166{
167 tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
168 tcgv_i32_arg(a3), tcgv_i32_arg(a4));
169}
170
17b9fadb
RH
171static void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
172 TCGv_i64 a3, TCGv_i64 a4)
6fc75d50
RH
173{
174 tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
175 tcgv_i64_arg(a3), tcgv_i64_arg(a4));
176}
177
17b9fadb
RH
178static void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
179 TCGv_i32 a3, TCGArg a4)
6fc75d50
RH
180{
181 tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
182 tcgv_i32_arg(a3), a4);
183}
184
17b9fadb
RH
185static void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
186 TCGv_i64 a3, TCGArg a4)
6fc75d50
RH
187{
188 tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
189 tcgv_i64_arg(a3), a4);
190}
191
17b9fadb
RH
192static void DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
193 TCGArg a3, TCGArg a4)
6fc75d50
RH
194{
195 tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
196}
197
17b9fadb
RH
198static void DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
199 TCGArg a3, TCGArg a4)
6fc75d50
RH
200{
201 tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
202}
203
17b9fadb
RH
204static void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
205 TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
6fc75d50
RH
206{
207 tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
208 tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
209}
210
17b9fadb
RH
211static void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
212 TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
6fc75d50
RH
213{
214 tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
215 tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
216}
217
17b9fadb
RH
218static void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
219 TCGv_i32 a3, TCGArg a4, TCGArg a5)
6fc75d50
RH
220{
221 tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
222 tcgv_i32_arg(a3), a4, a5);
223}
224
17b9fadb
RH
225static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
226 TCGv_i64 a3, TCGArg a4, TCGArg a5)
6fc75d50
RH
227{
228 tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
229 tcgv_i64_arg(a3), a4, a5);
230}
231
17b9fadb
RH
232static void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
233 TCGv_i32 a3, TCGv_i32 a4,
234 TCGv_i32 a5, TCGv_i32 a6)
6fc75d50
RH
235{
236 tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
237 tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
238 tcgv_i32_arg(a6));
239}
240
17b9fadb
RH
241static void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
242 TCGv_i64 a3, TCGv_i64 a4,
243 TCGv_i64 a5, TCGv_i64 a6)
6fc75d50
RH
244{
245 tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
246 tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
247 tcgv_i64_arg(a6));
248}
249
17b9fadb
RH
250static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
251 TCGv_i32 a3, TCGv_i32 a4,
252 TCGv_i32 a5, TCGArg a6)
6fc75d50
RH
253{
254 tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
255 tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
256}
257
17b9fadb
RH
258static void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
259 TCGv_i64 a3, TCGv_i64 a4,
260 TCGv_i64 a5, TCGArg a6)
6fc75d50
RH
261{
262 tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
263 tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
264}
265
17b9fadb
RH
266static void DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
267 TCGv_i32 a3, TCGv_i32 a4,
268 TCGArg a5, TCGArg a6)
6fc75d50
RH
269{
270 tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
271 tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
272}
273
f85b1fc4
RH
274/* Generic ops. */
275
01bbb6e3
RH
276void gen_set_label(TCGLabel *l)
277{
278 l->present = 1;
279 tcg_gen_op1(INDEX_op_set_label, label_arg(l));
280}
281
f85b1fc4
RH
282static void add_last_as_label_use(TCGLabel *l)
283{
284 TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));
285
286 u->op = tcg_last_op();
287 QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
288}
289
290void tcg_gen_br(TCGLabel *l)
291{
292 tcg_gen_op1(INDEX_op_br, label_arg(l));
293 add_last_as_label_use(l);
294}
295
f65e19bc
PK
296void tcg_gen_mb(TCGBar mb_type)
297{
c914d46d
RH
298#ifdef CONFIG_USER_ONLY
299 bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
300#else
301 /*
302 * It is tempting to elide the barrier in a uniprocessor context.
303 * However, even with a single cpu we have i/o threads running in
304 * parallel, and lack of memory order can result in e.g. virtio
305 * queue entries being read incorrectly.
306 */
307 bool parallel = true;
308#endif
309
310 if (parallel) {
b7e8b17a 311 tcg_gen_op1(INDEX_op_mb, mb_type);
f65e19bc
PK
312 }
313}
314
a0948bb7
RH
315void tcg_gen_plugin_cb(unsigned from)
316{
317 tcg_gen_op1(INDEX_op_plugin_cb, from);
318}
319
8a2927f2
RH
320void tcg_gen_plugin_mem_cb(TCGv_i64 addr, unsigned meminfo)
321{
322 tcg_gen_op2(INDEX_op_plugin_mem_cb, tcgv_i64_arg(addr), meminfo);
323}
324
01bbb6e3
RH
325void tcg_gen_plugin_cb_start(unsigned from, unsigned type, unsigned wr)
326{
327 tcg_gen_op3(INDEX_op_plugin_cb_start, from, type, wr);
328}
329
330void tcg_gen_plugin_cb_end(void)
331{
332 tcg_emit_op(INDEX_op_plugin_cb_end, 0);
333}
334
951c6300
RH
335/* 32 bit ops */
336
09607d35
RH
337void tcg_gen_discard_i32(TCGv_i32 arg)
338{
339 tcg_gen_op1_i32(INDEX_op_discard, arg);
340}
341
342void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
343{
344 if (ret != arg) {
345 tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
346 }
347}
348
11d11d61
RH
349void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
350{
351 tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
352}
353
09607d35
RH
354void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
355{
356 tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
357}
358
951c6300
RH
359void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
360{
361 /* some cases can be optimized here */
362 if (arg2 == 0) {
363 tcg_gen_mov_i32(ret, arg1);
364 } else {
11d11d61 365 tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
366 }
367}
368
09607d35
RH
369void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
370{
371 tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
372}
373
951c6300
RH
374void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
375{
b701f195
RH
376 if (arg1 == 0) {
377 tcg_gen_neg_i32(ret, arg2);
951c6300 378 } else {
11d11d61 379 tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
951c6300
RH
380 }
381}
382
383void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
384{
1551004e 385 tcg_gen_addi_i32(ret, arg1, -arg2);
951c6300
RH
386}
387
09607d35
RH
388void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
389{
b701f195 390 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
09607d35
RH
391}
392
393void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
394{
395 tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
396}
397
474b2e8f 398void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 399{
951c6300
RH
400 /* Some cases can be optimized here. */
401 switch (arg2) {
402 case 0:
403 tcg_gen_movi_i32(ret, 0);
404 return;
474b2e8f 405 case -1:
951c6300
RH
406 tcg_gen_mov_i32(ret, arg1);
407 return;
474b2e8f 408 case 0xff:
951c6300
RH
409 /* Don't recurse with tcg_gen_ext8u_i32. */
410 if (TCG_TARGET_HAS_ext8u_i32) {
411 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
412 return;
413 }
414 break;
474b2e8f 415 case 0xffff:
951c6300
RH
416 if (TCG_TARGET_HAS_ext16u_i32) {
417 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
418 return;
419 }
420 break;
421 }
11d11d61
RH
422
423 tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
424}
425
09607d35
RH
426void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
427{
428 tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
429}
430
951c6300
RH
431void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
432{
433 /* Some cases can be optimized here. */
434 if (arg2 == -1) {
435 tcg_gen_movi_i32(ret, -1);
436 } else if (arg2 == 0) {
437 tcg_gen_mov_i32(ret, arg1);
438 } else {
11d11d61 439 tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
440 }
441}
442
09607d35
RH
443void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
444{
445 tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
446}
447
951c6300
RH
448void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
449{
450 /* Some cases can be optimized here. */
451 if (arg2 == 0) {
452 tcg_gen_mov_i32(ret, arg1);
453 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
454 /* Don't recurse with tcg_gen_not_i32. */
455 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
456 } else {
11d11d61 457 tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
458 }
459}
460
09607d35
RH
461void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
462{
463 if (TCG_TARGET_HAS_not_i32) {
464 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
465 } else {
466 tcg_gen_xori_i32(ret, arg, -1);
467 }
468}
469
470void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
471{
472 tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
473}
474
474b2e8f 475void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 476{
474b2e8f 477 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
478 if (arg2 == 0) {
479 tcg_gen_mov_i32(ret, arg1);
480 } else {
11d11d61 481 tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
482 }
483}
484
09607d35
RH
485void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
486{
487 tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
488}
489
474b2e8f 490void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 491{
474b2e8f 492 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
493 if (arg2 == 0) {
494 tcg_gen_mov_i32(ret, arg1);
495 } else {
11d11d61 496 tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
497 }
498}
499
09607d35
RH
500void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
501{
502 tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
503}
504
474b2e8f 505void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 506{
474b2e8f 507 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
508 if (arg2 == 0) {
509 tcg_gen_mov_i32(ret, arg1);
510 } else {
11d11d61 511 tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
512 }
513}
514
42a268c2 515void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
951c6300
RH
516{
517 if (cond == TCG_COND_ALWAYS) {
42a268c2 518 tcg_gen_br(l);
951c6300 519 } else if (cond != TCG_COND_NEVER) {
42a268c2 520 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
f85b1fc4 521 add_last_as_label_use(l);
951c6300
RH
522 }
523}
524
42a268c2 525void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
951c6300 526{
37ed3bf1
RH
527 if (cond == TCG_COND_ALWAYS) {
528 tcg_gen_br(l);
529 } else if (cond != TCG_COND_NEVER) {
11d11d61 530 tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
37ed3bf1 531 }
951c6300
RH
532}
533
534void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
535 TCGv_i32 arg1, TCGv_i32 arg2)
536{
537 if (cond == TCG_COND_ALWAYS) {
538 tcg_gen_movi_i32(ret, 1);
539 } else if (cond == TCG_COND_NEVER) {
540 tcg_gen_movi_i32(ret, 0);
541 } else {
542 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
543 }
544}
545
546void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
547 TCGv_i32 arg1, int32_t arg2)
548{
11d11d61 549 tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
550}
551
3635502d
RH
552void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
553 TCGv_i32 arg1, TCGv_i32 arg2)
554{
555 if (cond == TCG_COND_ALWAYS) {
556 tcg_gen_movi_i32(ret, -1);
557 } else if (cond == TCG_COND_NEVER) {
558 tcg_gen_movi_i32(ret, 0);
559 } else if (TCG_TARGET_HAS_negsetcond_i32) {
560 tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
561 } else {
562 tcg_gen_setcond_i32(cond, ret, arg1, arg2);
563 tcg_gen_neg_i32(ret, ret);
564 }
565}
566
93c86ecd
PB
567void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
568 TCGv_i32 arg1, int32_t arg2)
569{
570 tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
571}
572
09607d35
RH
573void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
574{
575 tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
576}
577
951c6300
RH
578void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
579{
b2e3ae94
RH
580 if (arg2 == 0) {
581 tcg_gen_movi_i32(ret, 0);
582 } else if (is_power_of_2(arg2)) {
583 tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
584 } else {
11d11d61 585 tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
b2e3ae94 586 }
951c6300
RH
587}
588
589void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
590{
591 if (TCG_TARGET_HAS_div_i32) {
592 tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
593 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 594 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
595 tcg_gen_sari_i32(t0, arg1, 31);
596 tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
597 tcg_temp_free_i32(t0);
598 } else {
599 gen_helper_div_i32(ret, arg1, arg2);
600 }
601}
602
603void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
604{
605 if (TCG_TARGET_HAS_rem_i32) {
606 tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
607 } else if (TCG_TARGET_HAS_div_i32) {
5dd48602 608 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
609 tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
610 tcg_gen_mul_i32(t0, t0, arg2);
611 tcg_gen_sub_i32(ret, arg1, t0);
612 tcg_temp_free_i32(t0);
613 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 614 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
615 tcg_gen_sari_i32(t0, arg1, 31);
616 tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
617 tcg_temp_free_i32(t0);
618 } else {
619 gen_helper_rem_i32(ret, arg1, arg2);
620 }
621}
622
623void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
624{
625 if (TCG_TARGET_HAS_div_i32) {
626 tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
627 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 628 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
bfefdbea
RH
629 TCGv_i32 zero = tcg_constant_i32(0);
630 tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
951c6300
RH
631 tcg_temp_free_i32(t0);
632 } else {
633 gen_helper_divu_i32(ret, arg1, arg2);
634 }
635}
636
637void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
638{
639 if (TCG_TARGET_HAS_rem_i32) {
640 tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
641 } else if (TCG_TARGET_HAS_div_i32) {
5dd48602 642 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
643 tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
644 tcg_gen_mul_i32(t0, t0, arg2);
645 tcg_gen_sub_i32(ret, arg1, t0);
646 tcg_temp_free_i32(t0);
647 } else if (TCG_TARGET_HAS_div2_i32) {
5dd48602 648 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
bfefdbea
RH
649 TCGv_i32 zero = tcg_constant_i32(0);
650 tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
951c6300
RH
651 tcg_temp_free_i32(t0);
652 } else {
653 gen_helper_remu_i32(ret, arg1, arg2);
654 }
655}
656
657void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
658{
659 if (TCG_TARGET_HAS_andc_i32) {
660 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
661 } else {
5dd48602 662 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
663 tcg_gen_not_i32(t0, arg2);
664 tcg_gen_and_i32(ret, arg1, t0);
665 tcg_temp_free_i32(t0);
666 }
667}
668
669void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
670{
671 if (TCG_TARGET_HAS_eqv_i32) {
672 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
673 } else {
674 tcg_gen_xor_i32(ret, arg1, arg2);
675 tcg_gen_not_i32(ret, ret);
676 }
677}
678
679void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
680{
681 if (TCG_TARGET_HAS_nand_i32) {
682 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
683 } else {
684 tcg_gen_and_i32(ret, arg1, arg2);
685 tcg_gen_not_i32(ret, ret);
686 }
687}
688
689void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
690{
691 if (TCG_TARGET_HAS_nor_i32) {
692 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
693 } else {
694 tcg_gen_or_i32(ret, arg1, arg2);
695 tcg_gen_not_i32(ret, ret);
696 }
697}
698
699void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
700{
701 if (TCG_TARGET_HAS_orc_i32) {
702 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
703 } else {
5dd48602 704 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
951c6300
RH
705 tcg_gen_not_i32(t0, arg2);
706 tcg_gen_or_i32(ret, arg1, t0);
707 tcg_temp_free_i32(t0);
708 }
709}
710
0e28d006
RH
711void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
712{
713 if (TCG_TARGET_HAS_clz_i32) {
714 tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
715 } else if (TCG_TARGET_HAS_clz_i64) {
5dd48602
RH
716 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
717 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
0e28d006
RH
718 tcg_gen_extu_i32_i64(t1, arg1);
719 tcg_gen_extu_i32_i64(t2, arg2);
720 tcg_gen_addi_i64(t2, t2, 32);
721 tcg_gen_clz_i64(t1, t1, t2);
722 tcg_gen_extrl_i64_i32(ret, t1);
723 tcg_temp_free_i64(t1);
724 tcg_temp_free_i64(t2);
725 tcg_gen_subi_i32(ret, ret, 32);
726 } else {
727 gen_helper_clz_i32(ret, arg1, arg2);
728 }
729}
730
731void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
732{
11d11d61 733 tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
0e28d006
RH
734}
735
736void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
737{
738 if (TCG_TARGET_HAS_ctz_i32) {
739 tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
740 } else if (TCG_TARGET_HAS_ctz_i64) {
5dd48602
RH
741 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
742 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
0e28d006
RH
743 tcg_gen_extu_i32_i64(t1, arg1);
744 tcg_gen_extu_i32_i64(t2, arg2);
745 tcg_gen_ctz_i64(t1, t1, t2);
746 tcg_gen_extrl_i64_i32(ret, t1);
747 tcg_temp_free_i64(t1);
748 tcg_temp_free_i64(t2);
14e99210
RH
749 } else if (TCG_TARGET_HAS_ctpop_i32
750 || TCG_TARGET_HAS_ctpop_i64
751 || TCG_TARGET_HAS_clz_i32
752 || TCG_TARGET_HAS_clz_i64) {
5dd48602 753 TCGv_i32 z, t = tcg_temp_ebb_new_i32();
14e99210
RH
754
755 if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
756 tcg_gen_subi_i32(t, arg1, 1);
757 tcg_gen_andc_i32(t, t, arg1);
758 tcg_gen_ctpop_i32(t, t);
759 } else {
760 /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
761 tcg_gen_neg_i32(t, arg1);
762 tcg_gen_and_i32(t, t, arg1);
763 tcg_gen_clzi_i32(t, t, 32);
764 tcg_gen_xori_i32(t, t, 31);
765 }
11d11d61 766 z = tcg_constant_i32(0);
14e99210
RH
767 tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
768 tcg_temp_free_i32(t);
0e28d006
RH
769 } else {
770 gen_helper_ctz_i32(ret, arg1, arg2);
771 }
772}
773
774void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
775{
14e99210
RH
776 if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
777 /* This equivalence has the advantage of not requiring a fixup. */
5dd48602 778 TCGv_i32 t = tcg_temp_ebb_new_i32();
14e99210
RH
779 tcg_gen_subi_i32(t, arg1, 1);
780 tcg_gen_andc_i32(t, t, arg1);
781 tcg_gen_ctpop_i32(ret, t);
782 tcg_temp_free_i32(t);
783 } else {
11d11d61 784 tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
14e99210 785 }
0e28d006
RH
786}
787
086920c2
RH
788void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
789{
790 if (TCG_TARGET_HAS_clz_i32) {
5dd48602 791 TCGv_i32 t = tcg_temp_ebb_new_i32();
086920c2
RH
792 tcg_gen_sari_i32(t, arg, 31);
793 tcg_gen_xor_i32(t, t, arg);
794 tcg_gen_clzi_i32(t, t, 32);
795 tcg_gen_subi_i32(ret, t, 1);
796 tcg_temp_free_i32(t);
797 } else {
798 gen_helper_clrsb_i32(ret, arg);
799 }
800}
801
a768e4e9
RH
802void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
803{
804 if (TCG_TARGET_HAS_ctpop_i32) {
805 tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
806 } else if (TCG_TARGET_HAS_ctpop_i64) {
5dd48602 807 TCGv_i64 t = tcg_temp_ebb_new_i64();
a768e4e9
RH
808 tcg_gen_extu_i32_i64(t, arg1);
809 tcg_gen_ctpop_i64(t, t);
810 tcg_gen_extrl_i64_i32(ret, t);
811 tcg_temp_free_i64(t);
812 } else {
813 gen_helper_ctpop_i32(ret, arg1);
814 }
815}
816
951c6300
RH
817void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
818{
819 if (TCG_TARGET_HAS_rot_i32) {
820 tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
821 } else {
822 TCGv_i32 t0, t1;
823
5dd48602
RH
824 t0 = tcg_temp_ebb_new_i32();
825 t1 = tcg_temp_ebb_new_i32();
951c6300
RH
826 tcg_gen_shl_i32(t0, arg1, arg2);
827 tcg_gen_subfi_i32(t1, 32, arg2);
828 tcg_gen_shr_i32(t1, arg1, t1);
829 tcg_gen_or_i32(ret, t0, t1);
830 tcg_temp_free_i32(t0);
831 tcg_temp_free_i32(t1);
832 }
833}
834
07dada03 835void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 836{
07dada03 837 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
838 /* some cases can be optimized here */
839 if (arg2 == 0) {
840 tcg_gen_mov_i32(ret, arg1);
841 } else if (TCG_TARGET_HAS_rot_i32) {
11d11d61 842 tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
951c6300
RH
843 } else {
844 TCGv_i32 t0, t1;
5dd48602
RH
845 t0 = tcg_temp_ebb_new_i32();
846 t1 = tcg_temp_ebb_new_i32();
951c6300
RH
847 tcg_gen_shli_i32(t0, arg1, arg2);
848 tcg_gen_shri_i32(t1, arg1, 32 - arg2);
849 tcg_gen_or_i32(ret, t0, t1);
850 tcg_temp_free_i32(t0);
851 tcg_temp_free_i32(t1);
852 }
853}
854
855void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
856{
857 if (TCG_TARGET_HAS_rot_i32) {
858 tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
859 } else {
860 TCGv_i32 t0, t1;
861
5dd48602
RH
862 t0 = tcg_temp_ebb_new_i32();
863 t1 = tcg_temp_ebb_new_i32();
951c6300
RH
864 tcg_gen_shr_i32(t0, arg1, arg2);
865 tcg_gen_subfi_i32(t1, 32, arg2);
866 tcg_gen_shl_i32(t1, arg1, t1);
867 tcg_gen_or_i32(ret, t0, t1);
868 tcg_temp_free_i32(t0);
869 tcg_temp_free_i32(t1);
870 }
871}
872
07dada03 873void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
951c6300 874{
07dada03 875 tcg_debug_assert(arg2 >= 0 && arg2 < 32);
951c6300
RH
876 /* some cases can be optimized here */
877 if (arg2 == 0) {
878 tcg_gen_mov_i32(ret, arg1);
879 } else {
880 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
881 }
882}
883
884void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
885 unsigned int ofs, unsigned int len)
886{
887 uint32_t mask;
888 TCGv_i32 t1;
889
890 tcg_debug_assert(ofs < 32);
0d0d309d 891 tcg_debug_assert(len > 0);
951c6300
RH
892 tcg_debug_assert(len <= 32);
893 tcg_debug_assert(ofs + len <= 32);
894
0d0d309d 895 if (len == 32) {
951c6300
RH
896 tcg_gen_mov_i32(ret, arg2);
897 return;
898 }
899 if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
900 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
901 return;
902 }
903
5dd48602 904 t1 = tcg_temp_ebb_new_i32();
951c6300 905
b0a60567
RH
906 if (TCG_TARGET_HAS_extract2_i32) {
907 if (ofs + len == 32) {
908 tcg_gen_shli_i32(t1, arg1, len);
909 tcg_gen_extract2_i32(ret, t1, arg2, len);
910 goto done;
911 }
912 if (ofs == 0) {
913 tcg_gen_extract2_i32(ret, arg1, arg2, len);
914 tcg_gen_rotli_i32(ret, ret, len);
915 goto done;
916 }
917 }
918
919 mask = (1u << len) - 1;
951c6300
RH
920 if (ofs + len < 32) {
921 tcg_gen_andi_i32(t1, arg2, mask);
922 tcg_gen_shli_i32(t1, t1, ofs);
923 } else {
924 tcg_gen_shli_i32(t1, arg2, ofs);
925 }
926 tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
927 tcg_gen_or_i32(ret, ret, t1);
b0a60567 928 done:
951c6300
RH
929 tcg_temp_free_i32(t1);
930}
931
07cc68d5
RH
932void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
933 unsigned int ofs, unsigned int len)
934{
935 tcg_debug_assert(ofs < 32);
936 tcg_debug_assert(len > 0);
937 tcg_debug_assert(len <= 32);
938 tcg_debug_assert(ofs + len <= 32);
939
940 if (ofs + len == 32) {
941 tcg_gen_shli_i32(ret, arg, ofs);
942 } else if (ofs == 0) {
943 tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
944 } else if (TCG_TARGET_HAS_deposit_i32
945 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
11d11d61 946 TCGv_i32 zero = tcg_constant_i32(0);
07cc68d5 947 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
07cc68d5
RH
948 } else {
949 /* To help two-operand hosts we prefer to zero-extend first,
950 which allows ARG to stay live. */
951 switch (len) {
952 case 16:
953 if (TCG_TARGET_HAS_ext16u_i32) {
954 tcg_gen_ext16u_i32(ret, arg);
955 tcg_gen_shli_i32(ret, ret, ofs);
956 return;
957 }
958 break;
959 case 8:
960 if (TCG_TARGET_HAS_ext8u_i32) {
961 tcg_gen_ext8u_i32(ret, arg);
962 tcg_gen_shli_i32(ret, ret, ofs);
963 return;
964 }
965 break;
966 }
967 /* Otherwise prefer zero-extension over AND for code size. */
968 switch (ofs + len) {
969 case 16:
970 if (TCG_TARGET_HAS_ext16u_i32) {
971 tcg_gen_shli_i32(ret, arg, ofs);
972 tcg_gen_ext16u_i32(ret, ret);
973 return;
974 }
975 break;
976 case 8:
977 if (TCG_TARGET_HAS_ext8u_i32) {
978 tcg_gen_shli_i32(ret, arg, ofs);
979 tcg_gen_ext8u_i32(ret, ret);
980 return;
981 }
982 break;
983 }
984 tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
985 tcg_gen_shli_i32(ret, ret, ofs);
986 }
987}
988
7ec8bab3
RH
989void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
990 unsigned int ofs, unsigned int len)
991{
992 tcg_debug_assert(ofs < 32);
993 tcg_debug_assert(len > 0);
994 tcg_debug_assert(len <= 32);
995 tcg_debug_assert(ofs + len <= 32);
996
997 /* Canonicalize certain special cases, even if extract is supported. */
998 if (ofs + len == 32) {
999 tcg_gen_shri_i32(ret, arg, 32 - len);
1000 return;
1001 }
1002 if (ofs == 0) {
1003 tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
1004 return;
1005 }
1006
1007 if (TCG_TARGET_HAS_extract_i32
1008 && TCG_TARGET_extract_i32_valid(ofs, len)) {
1009 tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
1010 return;
1011 }
1012
1013 /* Assume that zero-extension, if available, is cheaper than a shift. */
1014 switch (ofs + len) {
1015 case 16:
1016 if (TCG_TARGET_HAS_ext16u_i32) {
1017 tcg_gen_ext16u_i32(ret, arg);
1018 tcg_gen_shri_i32(ret, ret, ofs);
1019 return;
1020 }
1021 break;
1022 case 8:
1023 if (TCG_TARGET_HAS_ext8u_i32) {
1024 tcg_gen_ext8u_i32(ret, arg);
1025 tcg_gen_shri_i32(ret, ret, ofs);
1026 return;
1027 }
1028 break;
1029 }
1030
1031 /* ??? Ideally we'd know what values are available for immediate AND.
1032 Assume that 8 bits are available, plus the special case of 16,
1033 so that we get ext8u, ext16u. */
1034 switch (len) {
1035 case 1 ... 8: case 16:
1036 tcg_gen_shri_i32(ret, arg, ofs);
1037 tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
1038 break;
1039 default:
1040 tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
1041 tcg_gen_shri_i32(ret, ret, 32 - len);
1042 break;
1043 }
1044}
1045
1046void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
1047 unsigned int ofs, unsigned int len)
1048{
1049 tcg_debug_assert(ofs < 32);
1050 tcg_debug_assert(len > 0);
1051 tcg_debug_assert(len <= 32);
1052 tcg_debug_assert(ofs + len <= 32);
1053
1054 /* Canonicalize certain special cases, even if extract is supported. */
1055 if (ofs + len == 32) {
1056 tcg_gen_sari_i32(ret, arg, 32 - len);
1057 return;
1058 }
1059 if (ofs == 0) {
1060 switch (len) {
1061 case 16:
1062 tcg_gen_ext16s_i32(ret, arg);
1063 return;
1064 case 8:
1065 tcg_gen_ext8s_i32(ret, arg);
1066 return;
1067 }
1068 }
1069
1070 if (TCG_TARGET_HAS_sextract_i32
1071 && TCG_TARGET_extract_i32_valid(ofs, len)) {
1072 tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
1073 return;
1074 }
1075
1076 /* Assume that sign-extension, if available, is cheaper than a shift. */
1077 switch (ofs + len) {
1078 case 16:
1079 if (TCG_TARGET_HAS_ext16s_i32) {
1080 tcg_gen_ext16s_i32(ret, arg);
1081 tcg_gen_sari_i32(ret, ret, ofs);
1082 return;
1083 }
1084 break;
1085 case 8:
1086 if (TCG_TARGET_HAS_ext8s_i32) {
1087 tcg_gen_ext8s_i32(ret, arg);
1088 tcg_gen_sari_i32(ret, ret, ofs);
1089 return;
1090 }
1091 break;
1092 }
1093 switch (len) {
1094 case 16:
1095 if (TCG_TARGET_HAS_ext16s_i32) {
1096 tcg_gen_shri_i32(ret, arg, ofs);
1097 tcg_gen_ext16s_i32(ret, ret);
1098 return;
1099 }
1100 break;
1101 case 8:
1102 if (TCG_TARGET_HAS_ext8s_i32) {
1103 tcg_gen_shri_i32(ret, arg, ofs);
1104 tcg_gen_ext8s_i32(ret, ret);
1105 return;
1106 }
1107 break;
1108 }
1109
1110 tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
1111 tcg_gen_sari_i32(ret, ret, 32 - len);
1112}
1113
2089fcc9
DH
1114/*
1115 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
1116 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
1117 */
1118void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
1119 unsigned int ofs)
1120{
1121 tcg_debug_assert(ofs <= 32);
1122 if (ofs == 0) {
1123 tcg_gen_mov_i32(ret, al);
1124 } else if (ofs == 32) {
1125 tcg_gen_mov_i32(ret, ah);
1126 } else if (al == ah) {
1127 tcg_gen_rotri_i32(ret, al, ofs);
fce1296f
RH
1128 } else if (TCG_TARGET_HAS_extract2_i32) {
1129 tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
2089fcc9 1130 } else {
5dd48602 1131 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
2089fcc9
DH
1132 tcg_gen_shri_i32(t0, al, ofs);
1133 tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
1134 tcg_temp_free_i32(t0);
1135 }
1136}
1137
951c6300
RH
1138void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
1139 TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
1140{
37ed3bf1
RH
1141 if (cond == TCG_COND_ALWAYS) {
1142 tcg_gen_mov_i32(ret, v1);
1143 } else if (cond == TCG_COND_NEVER) {
1144 tcg_gen_mov_i32(ret, v2);
951c6300 1145 } else {
3871be75 1146 tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
951c6300
RH
1147 }
1148}
1149
1150void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
1151 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
1152{
1153 if (TCG_TARGET_HAS_add2_i32) {
1154 tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
951c6300 1155 } else {
5dd48602
RH
1156 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1157 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
1158 tcg_gen_concat_i32_i64(t0, al, ah);
1159 tcg_gen_concat_i32_i64(t1, bl, bh);
1160 tcg_gen_add_i64(t0, t0, t1);
1161 tcg_gen_extr_i64_i32(rl, rh, t0);
1162 tcg_temp_free_i64(t0);
1163 tcg_temp_free_i64(t1);
1164 }
1165}
1166
1167void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
1168 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
1169{
1170 if (TCG_TARGET_HAS_sub2_i32) {
1171 tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
951c6300 1172 } else {
5dd48602
RH
1173 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1174 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
1175 tcg_gen_concat_i32_i64(t0, al, ah);
1176 tcg_gen_concat_i32_i64(t1, bl, bh);
1177 tcg_gen_sub_i64(t0, t0, t1);
1178 tcg_gen_extr_i64_i32(rl, rh, t0);
1179 tcg_temp_free_i64(t0);
1180 tcg_temp_free_i64(t1);
1181 }
1182}
1183
1184void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
1185{
1186 if (TCG_TARGET_HAS_mulu2_i32) {
1187 tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
951c6300 1188 } else if (TCG_TARGET_HAS_muluh_i32) {
5dd48602 1189 TCGv_i32 t = tcg_temp_ebb_new_i32();
951c6300
RH
1190 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
1191 tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
1192 tcg_gen_mov_i32(rl, t);
1193 tcg_temp_free_i32(t);
9fd86b51 1194 } else if (TCG_TARGET_REG_BITS == 64) {
5dd48602
RH
1195 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1196 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
1197 tcg_gen_extu_i32_i64(t0, arg1);
1198 tcg_gen_extu_i32_i64(t1, arg2);
1199 tcg_gen_mul_i64(t0, t0, t1);
1200 tcg_gen_extr_i64_i32(rl, rh, t0);
1201 tcg_temp_free_i64(t0);
1202 tcg_temp_free_i64(t1);
9fd86b51
RH
1203 } else {
1204 qemu_build_not_reached();
951c6300
RH
1205 }
1206}
1207
1208void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
1209{
1210 if (TCG_TARGET_HAS_muls2_i32) {
1211 tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
951c6300 1212 } else if (TCG_TARGET_HAS_mulsh_i32) {
5dd48602 1213 TCGv_i32 t = tcg_temp_ebb_new_i32();
951c6300
RH
1214 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
1215 tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
1216 tcg_gen_mov_i32(rl, t);
1217 tcg_temp_free_i32(t);
1218 } else if (TCG_TARGET_REG_BITS == 32) {
5dd48602
RH
1219 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1220 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
1221 TCGv_i32 t2 = tcg_temp_ebb_new_i32();
1222 TCGv_i32 t3 = tcg_temp_ebb_new_i32();
951c6300
RH
1223 tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
1224 /* Adjust for negative inputs. */
1225 tcg_gen_sari_i32(t2, arg1, 31);
1226 tcg_gen_sari_i32(t3, arg2, 31);
1227 tcg_gen_and_i32(t2, t2, arg2);
1228 tcg_gen_and_i32(t3, t3, arg1);
1229 tcg_gen_sub_i32(rh, t1, t2);
1230 tcg_gen_sub_i32(rh, rh, t3);
1231 tcg_gen_mov_i32(rl, t0);
1232 tcg_temp_free_i32(t0);
1233 tcg_temp_free_i32(t1);
1234 tcg_temp_free_i32(t2);
1235 tcg_temp_free_i32(t3);
1236 } else {
5dd48602
RH
1237 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1238 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
1239 tcg_gen_ext_i32_i64(t0, arg1);
1240 tcg_gen_ext_i32_i64(t1, arg2);
1241 tcg_gen_mul_i64(t0, t0, t1);
1242 tcg_gen_extr_i64_i32(rl, rh, t0);
1243 tcg_temp_free_i64(t0);
1244 tcg_temp_free_i64(t1);
1245 }
1246}
1247
5087abfb
RH
1248void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
1249{
1250 if (TCG_TARGET_REG_BITS == 32) {
5dd48602
RH
1251 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1252 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
1253 TCGv_i32 t2 = tcg_temp_ebb_new_i32();
5087abfb
RH
1254 tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
1255 /* Adjust for negative input for the signed arg1. */
1256 tcg_gen_sari_i32(t2, arg1, 31);
1257 tcg_gen_and_i32(t2, t2, arg2);
1258 tcg_gen_sub_i32(rh, t1, t2);
1259 tcg_gen_mov_i32(rl, t0);
1260 tcg_temp_free_i32(t0);
1261 tcg_temp_free_i32(t1);
1262 tcg_temp_free_i32(t2);
1263 } else {
5dd48602
RH
1264 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1265 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
5087abfb
RH
1266 tcg_gen_ext_i32_i64(t0, arg1);
1267 tcg_gen_extu_i32_i64(t1, arg2);
1268 tcg_gen_mul_i64(t0, t0, t1);
1269 tcg_gen_extr_i64_i32(rl, rh, t0);
1270 tcg_temp_free_i64(t0);
1271 tcg_temp_free_i64(t1);
1272 }
1273}
1274
951c6300
RH
1275void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1276{
1277 if (TCG_TARGET_HAS_ext8s_i32) {
1278 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1279 } else {
1280 tcg_gen_shli_i32(ret, arg, 24);
1281 tcg_gen_sari_i32(ret, ret, 24);
1282 }
1283}
1284
1285void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1286{
1287 if (TCG_TARGET_HAS_ext16s_i32) {
1288 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1289 } else {
1290 tcg_gen_shli_i32(ret, arg, 16);
1291 tcg_gen_sari_i32(ret, ret, 16);
1292 }
1293}
1294
1295void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1296{
1297 if (TCG_TARGET_HAS_ext8u_i32) {
1298 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1299 } else {
1300 tcg_gen_andi_i32(ret, arg, 0xffu);
1301 }
1302}
1303
1304void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1305{
1306 if (TCG_TARGET_HAS_ext16u_i32) {
1307 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1308 } else {
1309 tcg_gen_andi_i32(ret, arg, 0xffffu);
1310 }
1311}
1312
4de5a76a
PMD
1313/*
1314 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1315 *
1316 * Byte pattern: xxab -> yyba
1317 *
1318 * With TCG_BSWAP_IZ, x == zero, else undefined.
1319 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1320 */
2b836c2a 1321void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
951c6300 1322{
2b836c2a
RH
1323 /* Only one extension flag may be present. */
1324 tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
1325
951c6300 1326 if (TCG_TARGET_HAS_bswap16_i32) {
2b836c2a 1327 tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
951c6300 1328 } else {
5dd48602
RH
1329 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1330 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
2b836c2a 1331
4de5a76a
PMD
1332 /* arg = ..ab (IZ) xxab (!IZ) */
1333 tcg_gen_shri_i32(t0, arg, 8); /* t0 = ...a (IZ) .xxa (!IZ) */
2b836c2a 1334 if (!(flags & TCG_BSWAP_IZ)) {
4de5a76a 1335 tcg_gen_ext8u_i32(t0, t0); /* t0 = ...a */
2b836c2a 1336 }
951c6300 1337
2b836c2a 1338 if (flags & TCG_BSWAP_OS) {
4de5a76a
PMD
1339 tcg_gen_shli_i32(t1, arg, 24); /* t1 = b... */
1340 tcg_gen_sari_i32(t1, t1, 16); /* t1 = ssb. */
2b836c2a 1341 } else if (flags & TCG_BSWAP_OZ) {
4de5a76a
PMD
1342 tcg_gen_ext8u_i32(t1, arg); /* t1 = ...b */
1343 tcg_gen_shli_i32(t1, t1, 8); /* t1 = ..b. */
2b836c2a 1344 } else {
4de5a76a 1345 tcg_gen_shli_i32(t1, arg, 8); /* t1 = xab. */
2b836c2a
RH
1346 }
1347
4de5a76a
PMD
1348 tcg_gen_or_i32(ret, t0, t1); /* ret = ..ba (OZ) */
1349 /* = ssba (OS) */
1350 /* = xaba (no flag) */
951c6300 1351 tcg_temp_free_i32(t0);
2b836c2a 1352 tcg_temp_free_i32(t1);
951c6300
RH
1353 }
1354}
1355
92964556
PMD
1356/*
1357 * bswap32_i32: 32-bit byte swap on a 32-bit value.
1358 *
1359 * Byte pattern: abcd -> dcba
1360 */
951c6300
RH
1361void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
1362{
1363 if (TCG_TARGET_HAS_bswap32_i32) {
587195bd 1364 tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
951c6300 1365 } else {
5dd48602
RH
1366 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1367 TCGv_i32 t1 = tcg_temp_ebb_new_i32();
11d11d61 1368 TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);
951c6300 1369
a686dc71
RH
1370 /* arg = abcd */
1371 tcg_gen_shri_i32(t0, arg, 8); /* t0 = .abc */
1372 tcg_gen_and_i32(t1, arg, t2); /* t1 = .b.d */
1373 tcg_gen_and_i32(t0, t0, t2); /* t0 = .a.c */
1374 tcg_gen_shli_i32(t1, t1, 8); /* t1 = b.d. */
1375 tcg_gen_or_i32(ret, t0, t1); /* ret = badc */
951c6300 1376
a686dc71
RH
1377 tcg_gen_shri_i32(t0, ret, 16); /* t0 = ..ba */
1378 tcg_gen_shli_i32(t1, ret, 16); /* t1 = dc.. */
1379 tcg_gen_or_i32(ret, t0, t1); /* ret = dcba */
951c6300 1380
951c6300
RH
1381 tcg_temp_free_i32(t0);
1382 tcg_temp_free_i32(t1);
1383 }
1384}
1385
b8976aa5
PMD
/*
 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
 *
 * Byte pattern: abcd -> cdab
 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate (by half the width). */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1396
b87fb8cd
RH
/* Signed minimum: ret = min(a, b) treating operands as signed. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

/* Unsigned minimum: ret = min(a, b) treating operands as unsigned. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

/* Signed maximum: ret = max(a, b) treating operands as signed. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

/* Unsigned maximum: ret = max(a, b) treating operands as unsigned. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1416
ff1f11f7
RH
/* Absolute value: ret = |a|, using the branchless xor/sub identity. */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    /* t = 0 if a >= 0, else -1 (all ones). */
    tcg_gen_sari_i32(t, a, 31);
    /* For negative a: (a ^ -1) - (-1) == ~a + 1 == -a.  For a >= 0 a no-op. */
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
1426
09607d35
RH
/*
 * Host-memory load/store wrappers: access a value at arg2 + offset
 * (typically a field within the CPU env structure), with the indicated
 * width and extension.  Each is a thin wrapper around the matching opcode.
 */

void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
1466
1467
951c6300
RH
1468/* 64-bit ops */
1469
951c6300
RH
/* Mark a 64-bit temporary as dead; on 32-bit hosts, discard both halves. */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op1_i64(INDEX_op_discard, arg);
    } else {
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    }
}

/* 64-bit register move; on 32-bit hosts, move the two halves. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        /* Self-move is a no-op. */
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}

/* Load a 64-bit immediate; on 32-bit hosts, split into two 32-bit movi. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    } else {
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    }
}
1509
/*
 * 64-bit host-memory loads from arg2 + offset.  On 32-bit hosts each is
 * decomposed into a 32-bit load of the low half plus either a zero
 * (unsigned widths), a sign replication (signed widths), or a second
 * load (full 64-bit).
 */

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        /* Replicate the sign bit into the high half. */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Host byte order determines which half lives at the low address. */
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
1586
d56fea79
RH
/*
 * 64-bit host-memory stores to arg2 + offset.  On 32-bit hosts the
 * narrow widths store only the low half; the full 64-bit store writes
 * both halves in host byte order.
 */

void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Host byte order determines which half lives at the low address. */
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}
1626
d56fea79
RH
/* 64-bit add; on 32-bit hosts, decomposed into add2 with carry. */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

/* 64-bit subtract; on 32-bit hosts, decomposed into sub2 with borrow. */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1646
951c6300
RH
/*
 * 64-bit bitwise ops; on 32-bit hosts each half is processed
 * independently since there is no cross-half interaction.
 */

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}
1676
/*
 * 64-bit shifts by a variable amount.  A 32-bit host has no single
 * opcode for this, so it falls back to an out-of-line helper call.
 */

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
    } else {
        gen_helper_shl_i64(ret, arg1, arg2);
    }
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
    } else {
        gen_helper_shr_i64(ret, arg1, arg2);
    }
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
    } else {
        gen_helper_sar_i64(ret, arg1, arg2);
    }
}
1703
/*
 * 64-bit multiply.  On a 32-bit host, decompose into 32-bit pieces:
 *   lo64(a*b) = mulu2(al, bl) + ((al*bh + ah*bl) << 32)
 * The high cross-products wrap out of the low 64 bits and are dropped.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
        return;
    }


    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    /* Full 32x32->64 product of the low halves. */
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Add both cross-products into the high half. */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    /* Accumulate in t0 so that ret may alias either input. */
    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
11d11d61 1730
951c6300
RH
1731void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1732{
1733 /* some cases can be optimized here */
1734 if (arg2 == 0) {
1735 tcg_gen_mov_i64(ret, arg1);
11d11d61
RH
1736 } else if (TCG_TARGET_REG_BITS == 64) {
1737 tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
951c6300 1738 } else {
11d11d61
RH
1739 tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
1740 TCGV_LOW(arg1), TCGV_HIGH(arg1),
1741 tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
951c6300
RH
1742 }
1743}
1744
/* Subtract from an immediate: ret = arg1 - arg2. */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0) {
        /* 0 - x is just negation. */
        tcg_gen_neg_i64(ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: sub2 with the immediate split into two halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

/* Subtract an immediate: ret = arg1 - arg2, via addition of -arg2. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_addi_i64(ret, arg1, -arg2);
}
1762
e0de2f55
RH
/* Two's-complement negation: ret = -arg. */
void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        /* 32-bit host: compute 0 - arg with borrow across the halves. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         zero, zero, TCGV_LOW(arg), TCGV_HIGH(arg));
    }
}
1773
474b2e8f 1774void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
951c6300 1775{
3a13c3f3
RH
1776 if (TCG_TARGET_REG_BITS == 32) {
1777 tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1778 tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1779 return;
1780 }
1781
951c6300
RH
1782 /* Some cases can be optimized here. */
1783 switch (arg2) {
1784 case 0:
1785 tcg_gen_movi_i64(ret, 0);
1786 return;
474b2e8f 1787 case -1:
951c6300
RH
1788 tcg_gen_mov_i64(ret, arg1);
1789 return;
474b2e8f 1790 case 0xff:
951c6300
RH
1791 /* Don't recurse with tcg_gen_ext8u_i64. */
1792 if (TCG_TARGET_HAS_ext8u_i64) {
1793 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
1794 return;
1795 }
1796 break;
474b2e8f 1797 case 0xffff:
951c6300
RH
1798 if (TCG_TARGET_HAS_ext16u_i64) {
1799 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
1800 return;
1801 }
1802 break;
474b2e8f 1803 case 0xffffffffu:
951c6300
RH
1804 if (TCG_TARGET_HAS_ext32u_i64) {
1805 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
1806 return;
1807 }
1808 break;
1809 }
11d11d61
RH
1810
1811 tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
951c6300
RH
1812}
1813
1814void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1815{
3a13c3f3
RH
1816 if (TCG_TARGET_REG_BITS == 32) {
1817 tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1818 tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1819 return;
1820 }
951c6300
RH
1821 /* Some cases can be optimized here. */
1822 if (arg2 == -1) {
1823 tcg_gen_movi_i64(ret, -1);
1824 } else if (arg2 == 0) {
1825 tcg_gen_mov_i64(ret, arg1);
1826 } else {
11d11d61 1827 tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
951c6300 1828 }
951c6300
RH
1829}
1830
1831void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1832{
3a13c3f3
RH
1833 if (TCG_TARGET_REG_BITS == 32) {
1834 tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1835 tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1836 return;
1837 }
951c6300
RH
1838 /* Some cases can be optimized here. */
1839 if (arg2 == 0) {
1840 tcg_gen_mov_i64(ret, arg1);
1841 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1842 /* Don't recurse with tcg_gen_not_i64. */
1843 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1844 } else {
11d11d61 1845 tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
951c6300 1846 }
951c6300
RH
1847}
1848
951c6300
RH
/*
 * 64-bit shift by immediate C on a 32-bit host, decomposed into 32-bit
 * operations on the low/high halves of ARG1.
 *   right/arith select among shl (0/x), shr (1/0) and sar (1/1).
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: plain copy of both halves. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half comes entirely from the other. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                /* High half filled with the replicated sign bit. */
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* 0 < c < 32: the low half receives bits shifted out of the high. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* 0 < c < 32 left shift: high half receives bits from the low.
           Compute the high half first so that ret may alias arg1. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1898
/*
 * 64-bit shifts by an immediate.  On 32-bit hosts these route through
 * tcg_gen_shifti_i64 above; otherwise a shift by zero folds to a move.
 */

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
951c6300 1934
/* Conditional branch to L when (arg1 cond arg2); trivial conds fold away. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare both halves with brcond2. */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        /* Record that the op just emitted branches to L. */
        add_last_as_label_use(l);
    }
}
1951
/* Conditional branch against a 64-bit immediate. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: brcond2 with the immediate split into halves. */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        /* Record that the op just emitted branches to L. */
        add_last_as_label_use(l);
    }
}
1967
/* Set RET to 1 if (arg1 cond arg2), else 0. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: setcond2 yields the low half; high is zero. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

/* Set RET to 1 if (arg1 cond arg2), else 0, with an immediate arg2. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: setcond2 with the immediate split into halves. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
2004
93c86ecd
PB
2005void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
2006 TCGv_i64 arg1, int64_t arg2)
2007{
2008 tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
2009}
2010
3635502d
RH
2011void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
2012 TCGv_i64 arg1, TCGv_i64 arg2)
2013{
2014 if (cond == TCG_COND_ALWAYS) {
2015 tcg_gen_movi_i64(ret, -1);
2016 } else if (cond == TCG_COND_NEVER) {
2017 tcg_gen_movi_i64(ret, 0);
2018 } else if (TCG_TARGET_HAS_negsetcond_i64) {
2019 tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
2020 } else if (TCG_TARGET_REG_BITS == 32) {
2021 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
2022 TCGV_LOW(arg1), TCGV_HIGH(arg1),
2023 TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
2024 tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
2025 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
2026 } else {
2027 tcg_gen_setcond_i64(cond, ret, arg1, arg2);
2028 tcg_gen_neg_i64(ret, ret);
2029 }
2030}
2031
951c6300
RH
2032void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2033{
b2e3ae94
RH
2034 if (arg2 == 0) {
2035 tcg_gen_movi_i64(ret, 0);
2036 } else if (is_power_of_2(arg2)) {
2037 tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
2038 } else {
f04de891 2039 tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
b2e3ae94 2040 }
951c6300
RH
2041}
2042
2043void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2044{
2045 if (TCG_TARGET_HAS_div_i64) {
2046 tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
2047 } else if (TCG_TARGET_HAS_div2_i64) {
5dd48602 2048 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2049 tcg_gen_sari_i64(t0, arg1, 63);
2050 tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
2051 tcg_temp_free_i64(t0);
2052 } else {
2053 gen_helper_div_i64(ret, arg1, arg2);
2054 }
2055}
2056
2057void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2058{
2059 if (TCG_TARGET_HAS_rem_i64) {
2060 tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
2061 } else if (TCG_TARGET_HAS_div_i64) {
5dd48602 2062 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2063 tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
2064 tcg_gen_mul_i64(t0, t0, arg2);
2065 tcg_gen_sub_i64(ret, arg1, t0);
2066 tcg_temp_free_i64(t0);
2067 } else if (TCG_TARGET_HAS_div2_i64) {
5dd48602 2068 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2069 tcg_gen_sari_i64(t0, arg1, 63);
2070 tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
2071 tcg_temp_free_i64(t0);
2072 } else {
2073 gen_helper_rem_i64(ret, arg1, arg2);
2074 }
2075}
2076
2077void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2078{
2079 if (TCG_TARGET_HAS_div_i64) {
2080 tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
2081 } else if (TCG_TARGET_HAS_div2_i64) {
5dd48602 2082 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
bfefdbea
RH
2083 TCGv_i64 zero = tcg_constant_i64(0);
2084 tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
951c6300
RH
2085 tcg_temp_free_i64(t0);
2086 } else {
2087 gen_helper_divu_i64(ret, arg1, arg2);
2088 }
2089}
2090
2091void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2092{
2093 if (TCG_TARGET_HAS_rem_i64) {
2094 tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
2095 } else if (TCG_TARGET_HAS_div_i64) {
5dd48602 2096 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2097 tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
2098 tcg_gen_mul_i64(t0, t0, arg2);
2099 tcg_gen_sub_i64(ret, arg1, t0);
2100 tcg_temp_free_i64(t0);
2101 } else if (TCG_TARGET_HAS_div2_i64) {
5dd48602 2102 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
bfefdbea
RH
2103 TCGv_i64 zero = tcg_constant_i64(0);
2104 tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
951c6300
RH
2105 tcg_temp_free_i64(t0);
2106 } else {
2107 gen_helper_remu_i64(ret, arg1, arg2);
2108 }
2109}
2110
2111void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
2112{
3a13c3f3
RH
2113 if (TCG_TARGET_REG_BITS == 32) {
2114 tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2115 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2116 } else if (TCG_TARGET_HAS_ext8s_i64) {
951c6300
RH
2117 tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
2118 } else {
2119 tcg_gen_shli_i64(ret, arg, 56);
2120 tcg_gen_sari_i64(ret, ret, 56);
2121 }
951c6300
RH
2122}
2123
2124void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
2125{
3a13c3f3
RH
2126 if (TCG_TARGET_REG_BITS == 32) {
2127 tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2128 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2129 } else if (TCG_TARGET_HAS_ext16s_i64) {
951c6300
RH
2130 tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
2131 } else {
2132 tcg_gen_shli_i64(ret, arg, 48);
2133 tcg_gen_sari_i64(ret, ret, 48);
2134 }
951c6300
RH
2135}
2136
2137void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
2138{
3a13c3f3
RH
2139 if (TCG_TARGET_REG_BITS == 32) {
2140 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2141 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2142 } else if (TCG_TARGET_HAS_ext32s_i64) {
951c6300
RH
2143 tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
2144 } else {
2145 tcg_gen_shli_i64(ret, arg, 32);
2146 tcg_gen_sari_i64(ret, ret, 32);
2147 }
951c6300
RH
2148}
2149
2150void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
2151{
3a13c3f3
RH
2152 if (TCG_TARGET_REG_BITS == 32) {
2153 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2154 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2155 } else if (TCG_TARGET_HAS_ext8u_i64) {
951c6300
RH
2156 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
2157 } else {
2158 tcg_gen_andi_i64(ret, arg, 0xffu);
2159 }
951c6300
RH
2160}
2161
2162void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
2163{
3a13c3f3
RH
2164 if (TCG_TARGET_REG_BITS == 32) {
2165 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2166 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2167 } else if (TCG_TARGET_HAS_ext16u_i64) {
951c6300
RH
2168 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
2169 } else {
2170 tcg_gen_andi_i64(ret, arg, 0xffffu);
2171 }
951c6300
RH
2172}
2173
2174void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
2175{
3a13c3f3
RH
2176 if (TCG_TARGET_REG_BITS == 32) {
2177 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2178 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2179 } else if (TCG_TARGET_HAS_ext32u_i64) {
951c6300
RH
2180 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
2181 } else {
2182 tcg_gen_andi_i64(ret, arg, 0xffffffffu);
2183 }
951c6300
RH
2184}
2185
8b078800
PMD
2186/*
2187 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
2188 *
2189 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
2190 *
2191 * With TCG_BSWAP_IZ, x == zero, else undefined.
2192 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2193 */
2b836c2a 2194void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
951c6300 2195{
2b836c2a
RH
2196 /* Only one extension flag may be present. */
2197 tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
2198
3a13c3f3 2199 if (TCG_TARGET_REG_BITS == 32) {
2b836c2a
RH
2200 tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
2201 if (flags & TCG_BSWAP_OS) {
2202 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2203 } else {
2204 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2205 }
3a13c3f3 2206 } else if (TCG_TARGET_HAS_bswap16_i64) {
2b836c2a 2207 tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
951c6300 2208 } else {
5dd48602
RH
2209 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2210 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300 2211
8b078800
PMD
2212 /* arg = ......ab or xxxxxxab */
2213 tcg_gen_shri_i64(t0, arg, 8); /* t0 = .......a or .xxxxxxa */
2b836c2a 2214 if (!(flags & TCG_BSWAP_IZ)) {
8b078800 2215 tcg_gen_ext8u_i64(t0, t0); /* t0 = .......a */
2b836c2a
RH
2216 }
2217
2218 if (flags & TCG_BSWAP_OS) {
8b078800
PMD
2219 tcg_gen_shli_i64(t1, arg, 56); /* t1 = b....... */
2220 tcg_gen_sari_i64(t1, t1, 48); /* t1 = ssssssb. */
2b836c2a 2221 } else if (flags & TCG_BSWAP_OZ) {
8b078800
PMD
2222 tcg_gen_ext8u_i64(t1, arg); /* t1 = .......b */
2223 tcg_gen_shli_i64(t1, t1, 8); /* t1 = ......b. */
2b836c2a 2224 } else {
8b078800 2225 tcg_gen_shli_i64(t1, arg, 8); /* t1 = xxxxxab. */
2b836c2a
RH
2226 }
2227
8b078800
PMD
2228 tcg_gen_or_i64(ret, t0, t1); /* ret = ......ba (OZ) */
2229 /* ssssssba (OS) */
2230 /* xxxxxaba (no flag) */
951c6300 2231 tcg_temp_free_i64(t0);
2b836c2a 2232 tcg_temp_free_i64(t1);
951c6300 2233 }
951c6300
RH
2234}
2235
9c406215
PMD
2236/*
2237 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
2238 *
2239 * Byte pattern: xxxxabcd -> yyyydcba
2240 *
2241 * With TCG_BSWAP_IZ, x == zero, else undefined.
2242 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2243 */
2b836c2a 2244void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
951c6300 2245{
2b836c2a
RH
2246 /* Only one extension flag may be present. */
2247 tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
2248
3a13c3f3
RH
2249 if (TCG_TARGET_REG_BITS == 32) {
2250 tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2b836c2a
RH
2251 if (flags & TCG_BSWAP_OS) {
2252 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2253 } else {
2254 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2255 }
3a13c3f3 2256 } else if (TCG_TARGET_HAS_bswap32_i64) {
2b836c2a 2257 tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
951c6300 2258 } else {
5dd48602
RH
2259 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2260 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
11d11d61 2261 TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);
951c6300 2262
2b836c2a
RH
2263 /* arg = xxxxabcd */
2264 tcg_gen_shri_i64(t0, arg, 8); /* t0 = .xxxxabc */
2265 tcg_gen_and_i64(t1, arg, t2); /* t1 = .....b.d */
2266 tcg_gen_and_i64(t0, t0, t2); /* t0 = .....a.c */
2267 tcg_gen_shli_i64(t1, t1, 8); /* t1 = ....b.d. */
2268 tcg_gen_or_i64(ret, t0, t1); /* ret = ....badc */
2269
2270 tcg_gen_shli_i64(t1, ret, 48); /* t1 = dc...... */
2271 tcg_gen_shri_i64(t0, ret, 16); /* t0 = ......ba */
2272 if (flags & TCG_BSWAP_OS) {
2273 tcg_gen_sari_i64(t1, t1, 32); /* t1 = ssssdc.. */
2274 } else {
2275 tcg_gen_shri_i64(t1, t1, 32); /* t1 = ....dc.. */
2276 }
9c406215
PMD
2277 tcg_gen_or_i64(ret, t0, t1); /* ret = ssssdcba (OS) */
2278 /* ....dcba (else) */
951c6300 2279
951c6300
RH
2280 tcg_temp_free_i64(t0);
2281 tcg_temp_free_i64(t1);
2282 }
951c6300
RH
2283}
2284
95180e75
PMD
2285/*
2286 * bswap64_i64: 64-bit byte swap on a 64-bit value.
2287 *
2288 * Byte pattern: abcdefgh -> hgfedcba
2289 */
951c6300
RH
2290void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
2291{
3a13c3f3
RH
2292 if (TCG_TARGET_REG_BITS == 32) {
2293 TCGv_i32 t0, t1;
5dd48602
RH
2294 t0 = tcg_temp_ebb_new_i32();
2295 t1 = tcg_temp_ebb_new_i32();
951c6300 2296
3a13c3f3
RH
2297 tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
2298 tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
2299 tcg_gen_mov_i32(TCGV_LOW(ret), t1);
2300 tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
2301 tcg_temp_free_i32(t0);
2302 tcg_temp_free_i32(t1);
2303 } else if (TCG_TARGET_HAS_bswap64_i64) {
587195bd 2304 tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
951c6300 2305 } else {
5dd48602
RH
2306 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2307 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2308 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
951c6300 2309
9e821eab
RH
2310 /* arg = abcdefgh */
2311 tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
2312 tcg_gen_shri_i64(t0, arg, 8); /* t0 = .abcdefg */
2313 tcg_gen_and_i64(t1, arg, t2); /* t1 = .b.d.f.h */
2314 tcg_gen_and_i64(t0, t0, t2); /* t0 = .a.c.e.g */
2315 tcg_gen_shli_i64(t1, t1, 8); /* t1 = b.d.f.h. */
2316 tcg_gen_or_i64(ret, t0, t1); /* ret = badcfehg */
2317
2318 tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
2319 tcg_gen_shri_i64(t0, ret, 16); /* t0 = ..badcfe */
2320 tcg_gen_and_i64(t1, ret, t2); /* t1 = ..dc..hg */
2321 tcg_gen_and_i64(t0, t0, t2); /* t0 = ..ba..fe */
2322 tcg_gen_shli_i64(t1, t1, 16); /* t1 = dc..hg.. */
2323 tcg_gen_or_i64(ret, t0, t1); /* ret = dcbahgfe */
2324
2325 tcg_gen_shri_i64(t0, ret, 32); /* t0 = ....dcba */
2326 tcg_gen_shli_i64(t1, ret, 32); /* t1 = hgfe.... */
2327 tcg_gen_or_i64(ret, t0, t1); /* ret = hgfedcba */
951c6300 2328
951c6300
RH
2329 tcg_temp_free_i64(t0);
2330 tcg_temp_free_i64(t1);
9e821eab 2331 tcg_temp_free_i64(t2);
951c6300 2332 }
951c6300
RH
2333}
2334
b8976aa5
PMD
2335/*
2336 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
2337 * See also include/qemu/bitops.h, hswap64.
2338 *
2339 * Byte pattern: abcdefgh -> ghefcdab
2340 */
46be8425
RH
2341void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
2342{
2343 uint64_t m = 0x0000ffff0000ffffull;
5dd48602
RH
2344 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2345 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
46be8425 2346
b8976aa5
PMD
2347 /* arg = abcdefgh */
2348 tcg_gen_rotli_i64(t1, arg, 32); /* t1 = efghabcd */
2349 tcg_gen_andi_i64(t0, t1, m); /* t0 = ..gh..cd */
2350 tcg_gen_shli_i64(t0, t0, 16); /* t0 = gh..cd.. */
2351 tcg_gen_shri_i64(t1, t1, 16); /* t1 = ..efghab */
2352 tcg_gen_andi_i64(t1, t1, m); /* t1 = ..ef..ab */
2353 tcg_gen_or_i64(ret, t0, t1); /* ret = ghefcdab */
46be8425
RH
2354
2355 tcg_temp_free_i64(t0);
2356 tcg_temp_free_i64(t1);
2357}
2358
ad262fb5
PMD
2359/*
2360 * wswap_i64: Swap 32-bit words within a 64-bit value.
2361 *
2362 * Byte pattern: abcdefgh -> efghabcd
2363 */
46be8425
RH
2364void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
2365{
2366 /* Swapping 2 32-bit elements is a rotate. */
2367 tcg_gen_rotli_i64(ret, arg, 32);
2368}
2369
951c6300
RH
2370void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
2371{
3a13c3f3
RH
2372 if (TCG_TARGET_REG_BITS == 32) {
2373 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2374 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
2375 } else if (TCG_TARGET_HAS_not_i64) {
951c6300
RH
2376 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
2377 } else {
2378 tcg_gen_xori_i64(ret, arg, -1);
2379 }
951c6300
RH
2380}
2381
2382void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2383{
3a13c3f3
RH
2384 if (TCG_TARGET_REG_BITS == 32) {
2385 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2386 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2387 } else if (TCG_TARGET_HAS_andc_i64) {
951c6300
RH
2388 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
2389 } else {
5dd48602 2390 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2391 tcg_gen_not_i64(t0, arg2);
2392 tcg_gen_and_i64(ret, arg1, t0);
2393 tcg_temp_free_i64(t0);
2394 }
951c6300
RH
2395}
2396
2397void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2398{
3a13c3f3
RH
2399 if (TCG_TARGET_REG_BITS == 32) {
2400 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2401 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2402 } else if (TCG_TARGET_HAS_eqv_i64) {
951c6300
RH
2403 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
2404 } else {
2405 tcg_gen_xor_i64(ret, arg1, arg2);
2406 tcg_gen_not_i64(ret, ret);
2407 }
951c6300
RH
2408}
2409
2410void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2411{
3a13c3f3
RH
2412 if (TCG_TARGET_REG_BITS == 32) {
2413 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2414 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2415 } else if (TCG_TARGET_HAS_nand_i64) {
951c6300
RH
2416 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2417 } else {
2418 tcg_gen_and_i64(ret, arg1, arg2);
2419 tcg_gen_not_i64(ret, ret);
2420 }
951c6300
RH
2421}
2422
2423void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2424{
3a13c3f3
RH
2425 if (TCG_TARGET_REG_BITS == 32) {
2426 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2427 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2428 } else if (TCG_TARGET_HAS_nor_i64) {
951c6300
RH
2429 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2430 } else {
2431 tcg_gen_or_i64(ret, arg1, arg2);
2432 tcg_gen_not_i64(ret, ret);
2433 }
951c6300
RH
2434}
2435
2436void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2437{
3a13c3f3
RH
2438 if (TCG_TARGET_REG_BITS == 32) {
2439 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2440 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2441 } else if (TCG_TARGET_HAS_orc_i64) {
951c6300
RH
2442 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2443 } else {
5dd48602 2444 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
2445 tcg_gen_not_i64(t0, arg2);
2446 tcg_gen_or_i64(ret, arg1, t0);
2447 tcg_temp_free_i64(t0);
2448 }
951c6300
RH
2449}
2450
0e28d006
RH
2451void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2452{
2453 if (TCG_TARGET_HAS_clz_i64) {
2454 tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2455 } else {
2456 gen_helper_clz_i64(ret, arg1, arg2);
2457 }
2458}
2459
2460void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
2461{
2462 if (TCG_TARGET_REG_BITS == 32
2463 && TCG_TARGET_HAS_clz_i32
2464 && arg2 <= 0xffffffffu) {
5dd48602 2465 TCGv_i32 t = tcg_temp_ebb_new_i32();
11d11d61 2466 tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
0e28d006
RH
2467 tcg_gen_addi_i32(t, t, 32);
2468 tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
2469 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2470 tcg_temp_free_i32(t);
2471 } else {
f04de891 2472 tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
0e28d006
RH
2473 }
2474}
2475
2476void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2477{
2478 if (TCG_TARGET_HAS_ctz_i64) {
2479 tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
14e99210 2480 } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
5dd48602 2481 TCGv_i64 z, t = tcg_temp_ebb_new_i64();
14e99210
RH
2482
2483 if (TCG_TARGET_HAS_ctpop_i64) {
2484 tcg_gen_subi_i64(t, arg1, 1);
2485 tcg_gen_andc_i64(t, t, arg1);
2486 tcg_gen_ctpop_i64(t, t);
2487 } else {
2488 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
2489 tcg_gen_neg_i64(t, arg1);
2490 tcg_gen_and_i64(t, t, arg1);
2491 tcg_gen_clzi_i64(t, t, 64);
2492 tcg_gen_xori_i64(t, t, 63);
2493 }
11d11d61 2494 z = tcg_constant_i64(0);
14e99210
RH
2495 tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
2496 tcg_temp_free_i64(t);
2497 tcg_temp_free_i64(z);
0e28d006
RH
2498 } else {
2499 gen_helper_ctz_i64(ret, arg1, arg2);
2500 }
2501}
2502
2503void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
2504{
2505 if (TCG_TARGET_REG_BITS == 32
2506 && TCG_TARGET_HAS_ctz_i32
2507 && arg2 <= 0xffffffffu) {
5dd48602 2508 TCGv_i32 t32 = tcg_temp_ebb_new_i32();
11d11d61 2509 tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
0e28d006
RH
2510 tcg_gen_addi_i32(t32, t32, 32);
2511 tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
2512 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2513 tcg_temp_free_i32(t32);
14e99210
RH
2514 } else if (!TCG_TARGET_HAS_ctz_i64
2515 && TCG_TARGET_HAS_ctpop_i64
2516 && arg2 == 64) {
2517 /* This equivalence has the advantage of not requiring a fixup. */
5dd48602 2518 TCGv_i64 t = tcg_temp_ebb_new_i64();
14e99210
RH
2519 tcg_gen_subi_i64(t, arg1, 1);
2520 tcg_gen_andc_i64(t, t, arg1);
2521 tcg_gen_ctpop_i64(ret, t);
2522 tcg_temp_free_i64(t);
0e28d006 2523 } else {
f04de891 2524 tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
0e28d006
RH
2525 }
2526}
2527
086920c2
RH
2528void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
2529{
2530 if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
5dd48602 2531 TCGv_i64 t = tcg_temp_ebb_new_i64();
086920c2
RH
2532 tcg_gen_sari_i64(t, arg, 63);
2533 tcg_gen_xor_i64(t, t, arg);
2534 tcg_gen_clzi_i64(t, t, 64);
2535 tcg_gen_subi_i64(ret, t, 1);
2536 tcg_temp_free_i64(t);
2537 } else {
2538 gen_helper_clrsb_i64(ret, arg);
2539 }
2540}
2541
a768e4e9
RH
2542void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
2543{
2544 if (TCG_TARGET_HAS_ctpop_i64) {
2545 tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
2546 } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
2547 tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2548 tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2549 tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
2550 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2551 } else {
2552 gen_helper_ctpop_i64(ret, arg1);
2553 }
2554}
2555
951c6300
RH
2556void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2557{
2558 if (TCG_TARGET_HAS_rot_i64) {
2559 tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2560 } else {
2561 TCGv_i64 t0, t1;
5dd48602
RH
2562 t0 = tcg_temp_ebb_new_i64();
2563 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
2564 tcg_gen_shl_i64(t0, arg1, arg2);
2565 tcg_gen_subfi_i64(t1, 64, arg2);
2566 tcg_gen_shr_i64(t1, arg1, t1);
2567 tcg_gen_or_i64(ret, t0, t1);
2568 tcg_temp_free_i64(t0);
2569 tcg_temp_free_i64(t1);
2570 }
2571}
2572
07dada03 2573void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
951c6300 2574{
07dada03 2575 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
951c6300
RH
2576 /* some cases can be optimized here */
2577 if (arg2 == 0) {
2578 tcg_gen_mov_i64(ret, arg1);
2579 } else if (TCG_TARGET_HAS_rot_i64) {
11d11d61 2580 tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
951c6300
RH
2581 } else {
2582 TCGv_i64 t0, t1;
5dd48602
RH
2583 t0 = tcg_temp_ebb_new_i64();
2584 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
2585 tcg_gen_shli_i64(t0, arg1, arg2);
2586 tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2587 tcg_gen_or_i64(ret, t0, t1);
2588 tcg_temp_free_i64(t0);
2589 tcg_temp_free_i64(t1);
2590 }
2591}
2592
2593void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2594{
2595 if (TCG_TARGET_HAS_rot_i64) {
2596 tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2597 } else {
2598 TCGv_i64 t0, t1;
5dd48602
RH
2599 t0 = tcg_temp_ebb_new_i64();
2600 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
2601 tcg_gen_shr_i64(t0, arg1, arg2);
2602 tcg_gen_subfi_i64(t1, 64, arg2);
2603 tcg_gen_shl_i64(t1, arg1, t1);
2604 tcg_gen_or_i64(ret, t0, t1);
2605 tcg_temp_free_i64(t0);
2606 tcg_temp_free_i64(t1);
2607 }
2608}
2609
07dada03 2610void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
951c6300 2611{
07dada03 2612 tcg_debug_assert(arg2 >= 0 && arg2 < 64);
951c6300
RH
2613 /* some cases can be optimized here */
2614 if (arg2 == 0) {
2615 tcg_gen_mov_i64(ret, arg1);
2616 } else {
2617 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2618 }
2619}
2620
2621void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
2622 unsigned int ofs, unsigned int len)
2623{
2624 uint64_t mask;
2625 TCGv_i64 t1;
2626
2627 tcg_debug_assert(ofs < 64);
0d0d309d 2628 tcg_debug_assert(len > 0);
951c6300
RH
2629 tcg_debug_assert(len <= 64);
2630 tcg_debug_assert(ofs + len <= 64);
2631
0d0d309d 2632 if (len == 64) {
951c6300
RH
2633 tcg_gen_mov_i64(ret, arg2);
2634 return;
2635 }
2636 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2637 tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2638 return;
2639 }
2640
3a13c3f3
RH
2641 if (TCG_TARGET_REG_BITS == 32) {
2642 if (ofs >= 32) {
2643 tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2644 TCGV_LOW(arg2), ofs - 32, len);
2645 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2646 return;
2647 }
2648 if (ofs + len <= 32) {
2649 tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2650 TCGV_LOW(arg2), ofs, len);
2651 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2652 return;
2653 }
951c6300 2654 }
951c6300 2655
5dd48602 2656 t1 = tcg_temp_ebb_new_i64();
951c6300 2657
b0a60567
RH
2658 if (TCG_TARGET_HAS_extract2_i64) {
2659 if (ofs + len == 64) {
2660 tcg_gen_shli_i64(t1, arg1, len);
2661 tcg_gen_extract2_i64(ret, t1, arg2, len);
2662 goto done;
2663 }
2664 if (ofs == 0) {
2665 tcg_gen_extract2_i64(ret, arg1, arg2, len);
2666 tcg_gen_rotli_i64(ret, ret, len);
2667 goto done;
2668 }
2669 }
2670
2671 mask = (1ull << len) - 1;
951c6300
RH
2672 if (ofs + len < 64) {
2673 tcg_gen_andi_i64(t1, arg2, mask);
2674 tcg_gen_shli_i64(t1, t1, ofs);
2675 } else {
2676 tcg_gen_shli_i64(t1, arg2, ofs);
2677 }
2678 tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2679 tcg_gen_or_i64(ret, ret, t1);
b0a60567 2680 done:
951c6300
RH
2681 tcg_temp_free_i64(t1);
2682}
2683
07cc68d5
RH
2684void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
2685 unsigned int ofs, unsigned int len)
2686{
2687 tcg_debug_assert(ofs < 64);
2688 tcg_debug_assert(len > 0);
2689 tcg_debug_assert(len <= 64);
2690 tcg_debug_assert(ofs + len <= 64);
2691
2692 if (ofs + len == 64) {
2693 tcg_gen_shli_i64(ret, arg, ofs);
2694 } else if (ofs == 0) {
2695 tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2696 } else if (TCG_TARGET_HAS_deposit_i64
2697 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
11d11d61 2698 TCGv_i64 zero = tcg_constant_i64(0);
07cc68d5 2699 tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
07cc68d5
RH
2700 } else {
2701 if (TCG_TARGET_REG_BITS == 32) {
2702 if (ofs >= 32) {
2703 tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
2704 ofs - 32, len);
2705 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
2706 return;
2707 }
2708 if (ofs + len <= 32) {
2709 tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2710 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2711 return;
2712 }
2713 }
2714 /* To help two-operand hosts we prefer to zero-extend first,
2715 which allows ARG to stay live. */
2716 switch (len) {
2717 case 32:
2718 if (TCG_TARGET_HAS_ext32u_i64) {
2719 tcg_gen_ext32u_i64(ret, arg);
2720 tcg_gen_shli_i64(ret, ret, ofs);
2721 return;
2722 }
2723 break;
2724 case 16:
2725 if (TCG_TARGET_HAS_ext16u_i64) {
2726 tcg_gen_ext16u_i64(ret, arg);
2727 tcg_gen_shli_i64(ret, ret, ofs);
2728 return;
2729 }
2730 break;
2731 case 8:
2732 if (TCG_TARGET_HAS_ext8u_i64) {
2733 tcg_gen_ext8u_i64(ret, arg);
2734 tcg_gen_shli_i64(ret, ret, ofs);
2735 return;
2736 }
2737 break;
2738 }
2739 /* Otherwise prefer zero-extension over AND for code size. */
2740 switch (ofs + len) {
2741 case 32:
2742 if (TCG_TARGET_HAS_ext32u_i64) {
2743 tcg_gen_shli_i64(ret, arg, ofs);
2744 tcg_gen_ext32u_i64(ret, ret);
2745 return;
2746 }
2747 break;
2748 case 16:
2749 if (TCG_TARGET_HAS_ext16u_i64) {
2750 tcg_gen_shli_i64(ret, arg, ofs);
2751 tcg_gen_ext16u_i64(ret, ret);
2752 return;
2753 }
2754 break;
2755 case 8:
2756 if (TCG_TARGET_HAS_ext8u_i64) {
2757 tcg_gen_shli_i64(ret, arg, ofs);
2758 tcg_gen_ext8u_i64(ret, ret);
2759 return;
2760 }
2761 break;
2762 }
2763 tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2764 tcg_gen_shli_i64(ret, ret, ofs);
2765 }
2766}
2767
7ec8bab3
RH
2768void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
2769 unsigned int ofs, unsigned int len)
2770{
2771 tcg_debug_assert(ofs < 64);
2772 tcg_debug_assert(len > 0);
2773 tcg_debug_assert(len <= 64);
2774 tcg_debug_assert(ofs + len <= 64);
2775
2776 /* Canonicalize certain special cases, even if extract is supported. */
2777 if (ofs + len == 64) {
2778 tcg_gen_shri_i64(ret, arg, 64 - len);
2779 return;
2780 }
2781 if (ofs == 0) {
2782 tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2783 return;
2784 }
2785
2786 if (TCG_TARGET_REG_BITS == 32) {
2787 /* Look for a 32-bit extract within one of the two words. */
2788 if (ofs >= 32) {
2789 tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2790 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2791 return;
2792 }
2793 if (ofs + len <= 32) {
2794 tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2795 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2796 return;
2797 }
2798 /* The field is split across two words. One double-word
2799 shift is better than two double-word shifts. */
2800 goto do_shift_and;
2801 }
2802
2803 if (TCG_TARGET_HAS_extract_i64
2804 && TCG_TARGET_extract_i64_valid(ofs, len)) {
2805 tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
2806 return;
2807 }
2808
2809 /* Assume that zero-extension, if available, is cheaper than a shift. */
2810 switch (ofs + len) {
2811 case 32:
2812 if (TCG_TARGET_HAS_ext32u_i64) {
2813 tcg_gen_ext32u_i64(ret, arg);
2814 tcg_gen_shri_i64(ret, ret, ofs);
2815 return;
2816 }
2817 break;
2818 case 16:
2819 if (TCG_TARGET_HAS_ext16u_i64) {
2820 tcg_gen_ext16u_i64(ret, arg);
2821 tcg_gen_shri_i64(ret, ret, ofs);
2822 return;
2823 }
2824 break;
2825 case 8:
2826 if (TCG_TARGET_HAS_ext8u_i64) {
2827 tcg_gen_ext8u_i64(ret, arg);
2828 tcg_gen_shri_i64(ret, ret, ofs);
2829 return;
2830 }
2831 break;
2832 }
2833
2834 /* ??? Ideally we'd know what values are available for immediate AND.
2835 Assume that 8 bits are available, plus the special cases of 16 and 32,
2836 so that we get ext8u, ext16u, and ext32u. */
2837 switch (len) {
2838 case 1 ... 8: case 16: case 32:
2839 do_shift_and:
2840 tcg_gen_shri_i64(ret, arg, ofs);
2841 tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
2842 break;
2843 default:
2844 tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2845 tcg_gen_shri_i64(ret, ret, 64 - len);
2846 break;
2847 }
2848}
2849
2850void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2851 unsigned int ofs, unsigned int len)
2852{
2853 tcg_debug_assert(ofs < 64);
2854 tcg_debug_assert(len > 0);
2855 tcg_debug_assert(len <= 64);
2856 tcg_debug_assert(ofs + len <= 64);
2857
2858 /* Canonicalize certain special cases, even if sextract is supported. */
2859 if (ofs + len == 64) {
2860 tcg_gen_sari_i64(ret, arg, 64 - len);
2861 return;
2862 }
2863 if (ofs == 0) {
2864 switch (len) {
2865 case 32:
2866 tcg_gen_ext32s_i64(ret, arg);
2867 return;
2868 case 16:
2869 tcg_gen_ext16s_i64(ret, arg);
2870 return;
2871 case 8:
2872 tcg_gen_ext8s_i64(ret, arg);
2873 return;
2874 }
2875 }
2876
2877 if (TCG_TARGET_REG_BITS == 32) {
2878 /* Look for a 32-bit extract within one of the two words. */
2879 if (ofs >= 32) {
2880 tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2881 } else if (ofs + len <= 32) {
2882 tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2883 } else if (ofs == 0) {
2884 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2885 tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2886 return;
2887 } else if (len > 32) {
5dd48602 2888 TCGv_i32 t = tcg_temp_ebb_new_i32();
7ec8bab3
RH
2889 /* Extract the bits for the high word normally. */
2890 tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2891 /* Shift the field down for the low part. */
2892 tcg_gen_shri_i64(ret, arg, ofs);
2893 /* Overwrite the shift into the high part. */
2894 tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2895 tcg_temp_free_i32(t);
2896 return;
2897 } else {
2898 /* Shift the field down for the low part, such that the
2899 field sits at the MSB. */
2900 tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2901 /* Shift the field down from the MSB, sign extending. */
2902 tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2903 }
2904 /* Sign-extend the field from 32 bits. */
2905 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2906 return;
2907 }
2908
2909 if (TCG_TARGET_HAS_sextract_i64
2910 && TCG_TARGET_extract_i64_valid(ofs, len)) {
2911 tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2912 return;
2913 }
2914
2915 /* Assume that sign-extension, if available, is cheaper than a shift. */
2916 switch (ofs + len) {
2917 case 32:
2918 if (TCG_TARGET_HAS_ext32s_i64) {
2919 tcg_gen_ext32s_i64(ret, arg);
2920 tcg_gen_sari_i64(ret, ret, ofs);
2921 return;
2922 }
2923 break;
2924 case 16:
2925 if (TCG_TARGET_HAS_ext16s_i64) {
2926 tcg_gen_ext16s_i64(ret, arg);
2927 tcg_gen_sari_i64(ret, ret, ofs);
2928 return;
2929 }
2930 break;
2931 case 8:
2932 if (TCG_TARGET_HAS_ext8s_i64) {
2933 tcg_gen_ext8s_i64(ret, arg);
2934 tcg_gen_sari_i64(ret, ret, ofs);
2935 return;
2936 }
2937 break;
2938 }
2939 switch (len) {
2940 case 32:
2941 if (TCG_TARGET_HAS_ext32s_i64) {
2942 tcg_gen_shri_i64(ret, arg, ofs);
2943 tcg_gen_ext32s_i64(ret, ret);
2944 return;
2945 }
2946 break;
2947 case 16:
2948 if (TCG_TARGET_HAS_ext16s_i64) {
2949 tcg_gen_shri_i64(ret, arg, ofs);
2950 tcg_gen_ext16s_i64(ret, ret);
2951 return;
2952 }
2953 break;
2954 case 8:
2955 if (TCG_TARGET_HAS_ext8s_i64) {
2956 tcg_gen_shri_i64(ret, arg, ofs);
2957 tcg_gen_ext8s_i64(ret, ret);
2958 return;
2959 }
2960 break;
2961 }
2962 tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2963 tcg_gen_sari_i64(ret, ret, 64 - len);
2964}
2965
2089fcc9
DH
2966/*
2967 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2968 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2969 */
2970void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2971 unsigned int ofs)
2972{
2973 tcg_debug_assert(ofs <= 64);
2974 if (ofs == 0) {
2975 tcg_gen_mov_i64(ret, al);
2976 } else if (ofs == 64) {
2977 tcg_gen_mov_i64(ret, ah);
2978 } else if (al == ah) {
2979 tcg_gen_rotri_i64(ret, al, ofs);
fce1296f
RH
2980 } else if (TCG_TARGET_HAS_extract2_i64) {
2981 tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2089fcc9 2982 } else {
5dd48602 2983 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2089fcc9
DH
2984 tcg_gen_shri_i64(t0, al, ofs);
2985 tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2986 tcg_temp_free_i64(t0);
2987 }
2988}
2989
951c6300
RH
2990void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
2991 TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
2992{
37ed3bf1
RH
2993 if (cond == TCG_COND_ALWAYS) {
2994 tcg_gen_mov_i64(ret, v1);
2995 } else if (cond == TCG_COND_NEVER) {
2996 tcg_gen_mov_i64(ret, v2);
3871be75
RH
2997 } else if (TCG_TARGET_REG_BITS == 64) {
2998 tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2999 } else {
5dd48602 3000 TCGv_i32 t0 = tcg_temp_ebb_new_i32();
3871be75
RH
3001 TCGv_i32 zero = tcg_constant_i32(0);
3002
3a13c3f3
RH
3003 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
3004 TCGV_LOW(c1), TCGV_HIGH(c1),
3005 TCGV_LOW(c2), TCGV_HIGH(c2), cond);
3006
3871be75
RH
3007 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, zero,
3008 TCGV_LOW(v1), TCGV_LOW(v2));
3009 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, zero,
3010 TCGV_HIGH(v1), TCGV_HIGH(v2));
951c6300 3011
3a13c3f3 3012 tcg_temp_free_i32(t0);
951c6300 3013 }
951c6300
RH
3014}
3015
3016void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
3017 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
3018{
3019 if (TCG_TARGET_HAS_add2_i64) {
3020 tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
951c6300 3021 } else {
5dd48602
RH
3022 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3023 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
3024 tcg_gen_add_i64(t0, al, bl);
3025 tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
3026 tcg_gen_add_i64(rh, ah, bh);
3027 tcg_gen_add_i64(rh, rh, t1);
3028 tcg_gen_mov_i64(rl, t0);
3029 tcg_temp_free_i64(t0);
3030 tcg_temp_free_i64(t1);
3031 }
3032}
3033
3034void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
3035 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
3036{
3037 if (TCG_TARGET_HAS_sub2_i64) {
3038 tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
951c6300 3039 } else {
5dd48602
RH
3040 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3041 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
951c6300
RH
3042 tcg_gen_sub_i64(t0, al, bl);
3043 tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
3044 tcg_gen_sub_i64(rh, ah, bh);
3045 tcg_gen_sub_i64(rh, rh, t1);
3046 tcg_gen_mov_i64(rl, t0);
3047 tcg_temp_free_i64(t0);
3048 tcg_temp_free_i64(t1);
3049 }
3050}
3051
3052void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3053{
3054 if (TCG_TARGET_HAS_mulu2_i64) {
3055 tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
951c6300 3056 } else if (TCG_TARGET_HAS_muluh_i64) {
5dd48602 3057 TCGv_i64 t = tcg_temp_ebb_new_i64();
951c6300
RH
3058 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
3059 tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
3060 tcg_gen_mov_i64(rl, t);
3061 tcg_temp_free_i64(t);
3062 } else {
5dd48602 3063 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
3064 tcg_gen_mul_i64(t0, arg1, arg2);
3065 gen_helper_muluh_i64(rh, arg1, arg2);
3066 tcg_gen_mov_i64(rl, t0);
3067 tcg_temp_free_i64(t0);
3068 }
3069}
3070
3071void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3072{
3073 if (TCG_TARGET_HAS_muls2_i64) {
3074 tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
951c6300 3075 } else if (TCG_TARGET_HAS_mulsh_i64) {
5dd48602 3076 TCGv_i64 t = tcg_temp_ebb_new_i64();
951c6300
RH
3077 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
3078 tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
3079 tcg_gen_mov_i64(rl, t);
3080 tcg_temp_free_i64(t);
3081 } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
5dd48602
RH
3082 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3083 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
3084 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
3085 TCGv_i64 t3 = tcg_temp_ebb_new_i64();
951c6300
RH
3086 tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
3087 /* Adjust for negative inputs. */
3088 tcg_gen_sari_i64(t2, arg1, 63);
3089 tcg_gen_sari_i64(t3, arg2, 63);
3090 tcg_gen_and_i64(t2, t2, arg2);
3091 tcg_gen_and_i64(t3, t3, arg1);
3092 tcg_gen_sub_i64(rh, t1, t2);
3093 tcg_gen_sub_i64(rh, rh, t3);
3094 tcg_gen_mov_i64(rl, t0);
3095 tcg_temp_free_i64(t0);
3096 tcg_temp_free_i64(t1);
3097 tcg_temp_free_i64(t2);
3098 tcg_temp_free_i64(t3);
3099 } else {
5dd48602 3100 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
951c6300
RH
3101 tcg_gen_mul_i64(t0, arg1, arg2);
3102 gen_helper_mulsh_i64(rh, arg1, arg2);
3103 tcg_gen_mov_i64(rl, t0);
3104 tcg_temp_free_i64(t0);
3105 }
3106}
3107
5087abfb
RH
3108void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
3109{
5dd48602
RH
3110 TCGv_i64 t0 = tcg_temp_ebb_new_i64();
3111 TCGv_i64 t1 = tcg_temp_ebb_new_i64();
3112 TCGv_i64 t2 = tcg_temp_ebb_new_i64();
5087abfb
RH
3113 tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
3114 /* Adjust for negative input for the signed arg1. */
3115 tcg_gen_sari_i64(t2, arg1, 63);
3116 tcg_gen_and_i64(t2, t2, arg2);
3117 tcg_gen_sub_i64(rh, t1, t2);
3118 tcg_gen_mov_i64(rl, t0);
3119 tcg_temp_free_i64(t0);
3120 tcg_temp_free_i64(t1);
3121 tcg_temp_free_i64(t2);
3122}
3123
/* ret = min(a, b), comparing the operands as signed. */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
3128
/* ret = min(a, b), comparing the operands as unsigned. */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
3133
/* ret = max(a, b), comparing the operands as signed. */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
3138
/* ret = max(a, b), comparing the operands as unsigned. */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
3143
ff1f11f7
RH
3144void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
3145{
5dd48602 3146 TCGv_i64 t = tcg_temp_ebb_new_i64();
ff1f11f7
RH
3147
3148 tcg_gen_sari_i64(t, a, 63);
3149 tcg_gen_xor_i64(ret, a, t);
3150 tcg_gen_sub_i64(ret, ret, t);
3151 tcg_temp_free_i64(t);
3152}
3153
/* Size changing operations. */

/* Extract the low 32 bits of a 64-bit value into an i32. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host an i64 is a register pair; take the low half. */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Otherwise reinterpret the i64 temp directly as an i32. */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
3167
3168void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
3169{
3170 if (TCG_TARGET_REG_BITS == 32) {
3171 tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
13d885b0 3172 } else if (TCG_TARGET_HAS_extr_i64_i32) {
b7e8b17a 3173 tcg_gen_op2(INDEX_op_extrh_i64_i32,
ae8b75dc 3174 tcgv_i32_arg(ret), tcgv_i64_arg(arg));
951c6300 3175 } else {
5dd48602 3176 TCGv_i64 t = tcg_temp_ebb_new_i64();
609ad705 3177 tcg_gen_shri_i64(t, arg, 32);
dc41aa7d 3178 tcg_gen_mov_i32(ret, (TCGv_i32)t);
951c6300
RH
3179 tcg_temp_free_i64(t);
3180 }
951c6300
RH
3181}
3182
/* Zero-extend a 32-bit value into an i64. */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy the low half, clear the high half. */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3193
/* Sign-extend a 32-bit value into an i64. */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy the low half, replicate its sign bit high. */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3204
3205void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
3206{
3a13c3f3
RH
3207 TCGv_i64 tmp;
3208
3209 if (TCG_TARGET_REG_BITS == 32) {
3210 tcg_gen_mov_i32(TCGV_LOW(dest), low);
3211 tcg_gen_mov_i32(TCGV_HIGH(dest), high);
3212 return;
3213 }
3214
5dd48602 3215 tmp = tcg_temp_ebb_new_i64();
951c6300
RH
3216 /* These extensions are only needed for type correctness.
3217 We may be able to do better given target specific information. */
3218 tcg_gen_extu_i32_i64(tmp, high);
3219 tcg_gen_extu_i32_i64(dest, low);
3220 /* If deposit is available, use it. Otherwise use the extra
3221 knowledge that we have of the zero-extensions above. */
3222 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
3223 tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
3224 } else {
3225 tcg_gen_shli_i64(tmp, tmp, 32);
3226 tcg_gen_or_i64(dest, dest, tmp);
3227 }
3228 tcg_temp_free_i64(tmp);
951c6300
RH
3229}
3230
/* Split a 64-bit value into its low (lo) and high (hi) 32-bit halves. */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: both halves are directly addressable. */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
3241
/* Split arg into two i64s: lo = low 32 bits (zero-extended), hi = high 32. */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
3247
/* ret = hi:lo, depositing the low 32 bits of hi into bits [63:32] of lo. */
void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}
3252
/* Split a 128-bit value into its low and high 64-bit halves. */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
3258
/* Build a 128-bit value from two 64-bit halves: ret = hi:lo. */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
3264
3265void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
3266{
3267 if (dst != src) {
3268 tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
3269 tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
3270 }
3271}
3272
/* Load a 128-bit value from base+offset as two i64 halves,
   laid out in host byte order. */
void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
{
    if (HOST_BIG_ENDIAN) {
        /* Big-endian host: the high half sits at the lower address. */
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
    } else {
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
    }
}
3283
/* Store a 128-bit value to base+offset as two i64 halves,
   laid out in host byte order (mirrors tcg_gen_ld_i128). */
void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
{
    if (HOST_BIG_ENDIAN) {
        /* Big-endian host: the high half sits at the lower address. */
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
    } else {
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
    }
}
3294
/* QEMU specific operations. */

/*
 * Emit the op that exits the current TB, passing (tb | idx) back to
 * the main loop.  tb == NULL exits without chaining (idx must be 0);
 * idx <= TB_EXIT_IDXMAX pairs with a prior tcg_gen_goto_tb; otherwise
 * idx must be TB_EXIT_REQUESTED (the exitreq path).
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb. */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label. */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
3327
/*
 * Emit a direct-chaining jump to another TB via exit slot idx.
 * Each numbered slot may be issued at most once per TB (checked
 * under CONFIG_DEBUG_TCG via goto_tb_issue_mask).
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits. */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    /* Plugin memory helpers must not remain armed across a TB boundary. */
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
3342
7f11636d 3343void tcg_gen_lookup_and_goto_ptr(void)
cedbcb01 3344{
84f15616 3345 TCGv_ptr ptr;
e6d86bed 3346
b7e4afbd 3347 if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
07ea28b4 3348 tcg_gen_exit_tb(NULL, 0);
84f15616 3349 return;
cedbcb01 3350 }
84f15616
RH
3351
3352 plugin_gen_disable_mem_helpers();
5dd48602 3353 ptr = tcg_temp_ebb_new_ptr();
ad75a51e 3354 gen_helper_lookup_tb_ptr(ptr, tcg_env);
84f15616
RH
3355 tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
3356 tcg_temp_free_ptr(ptr);
cedbcb01 3357}