/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg.h"
#include "tcg-op.h"
#include "trace-tcg.h"
#include "trace/mem.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif

/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_op_buf[0].prev = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
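
/* Layout sketch (a reading of the code above, not a normative diagram):
 * gen_op_buf[] is threaded as a doubly-linked list through the array,
 * with element 0 serving as a sentinel whose .prev always points at the
 * most recently emitted op:
 *
 *   [0] <-> [1] <-> [2] <-> ... <-> [oi]
 *
 * Each new op optimistically fills in .next = oi + 1; the final op's
 * forward link only becomes consistent once translation of the block
 * finishes and the list is properly terminated.
 */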

void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_mb(TCGBar mb_type)
{
    if (parallel_cpus) {
        tcg_gen_op1(&tcg_ctx, INDEX_op_mb, mb_type);
    }
}

/* 32 bit ops */

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
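
/* When only a division opcode is available, the rem/remu fallbacks above
 * recover the remainder with the identity r = a - (a / b) * b; for
 * example remu(7, 3) computes 7 - (7 / 3) * 3 = 7 - 6 = 1.
 */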

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
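
/* The non-rot fallback above composes the rotate from two shifts:
 *   rotl(x, n) = (x << n) | (x >> (32 - n))
 * e.g. rotl(0x80000001, 4) = 0x00000010 | 0x00000008 = 0x00000018.
 * The count 32 - n is computed at runtime with tcg_gen_subfi_i32 since
 * arg2 is a variable here.
 */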

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
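
/* Deposit inserts the low LEN bits of arg2 into arg1 at bit offset OFS.
 * The fallback is the usual mask-and-merge sequence with
 * mask = (1 << len) - 1:
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 * e.g. deposit(0xffffffff, 0x5, ofs = 8, len = 4) yields 0xfffff5ff.
 */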

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
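
/* The generic movcond fallback is a branchless select: setcond leaves
 * 0 or 1 in t0, negation widens that to an all-zeros/all-ones mask, and
 *   ret = (v1 & t0) | (v2 & ~t0)
 * picks v1 when the condition held and v2 otherwise.
 */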

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
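
/* The 32-bit-host muls2 path derives the signed high word from the
 * unsigned product: viewing a signed operand as u - 2^32 * sign_bit,
 *   hi_signed(a, b) = hi_unsigned(a, b) - (a < 0 ? b : 0) - (b < 0 ? a : 0)
 * which is exactly what the sari/and/sub sequence above computes.
 */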

void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
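
/* The bswap32 fallback gathers each byte with a shift/mask pair; for
 * input 0xAABBCCDD the four terms OR together as:
 *   arg << 24                -> 0xDD000000
 *   (arg & 0x0000ff00) << 8  -> 0x00CC0000
 *   (arg >> 8) & 0x0000ff00  -> 0x0000BB00
 *   arg >> 24                -> 0x000000AA
 */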

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
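
/* Schoolbook decomposition: with arg1 = ah * 2^32 + al and
 * arg2 = bh * 2^32 + bl, the low 64 bits of the product are
 *   mulu2(al, bl) + ((al * bh + ah * bl) << 32)
 * The ah * bh term only influences bits 64 and up, so it is dropped.
 */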
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
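
/* A 64-bit constant shift on a 32-bit host is composed from 32-bit
 * shifts of the two halves.  For a right shift with 0 < c < 32:
 *   low  = (low >> c) | (high << (32 - c))
 *   high = high >> c            (sari if arithmetic, shri if logical)
 * Counts of 32 or more move one half into the other outright and fill
 * the vacated half with zeros (or copies of the sign bit for sar).
 */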

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
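
/* The add2 fallback recovers the carry out of the low-word addition via
 * an unsigned compare: after t0 = al + bl, carry = (t0 < al), since
 * unsigned overflow wraps the sum below either addend.  The sub2
 * fallback below detects the borrow symmetrically with (al < bl).
 */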

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
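
/* mulsu2 multiplies a signed arg1 by an unsigned arg2.  Viewing the
 * signed operand as u - 2^64 * sign_bit, the high word of the product is
 *   hi = hi_unsigned(arg1, arg2) - (arg1 < 0 ? arg2 : 0)
 * so a single correction term suffices, unlike the two needed by muls2.
 */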
1793
951c6300
RH
1794/* Size changing operations. */
1795
609ad705 1796void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
951c6300 1797{
3a13c3f3 1798 if (TCG_TARGET_REG_BITS == 32) {
609ad705
RH
1799 tcg_gen_mov_i32(ret, TCGV_LOW(arg));
1800 } else if (TCG_TARGET_HAS_extrl_i64_i32) {
1801 tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
1802 GET_TCGV_I32(ret), GET_TCGV_I64(arg));
1803 } else {
951c6300 1804 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
609ad705
RH
1805 }
1806}
1807
1808void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
1809{
1810 if (TCG_TARGET_REG_BITS == 32) {
1811 tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
1812 } else if (TCG_TARGET_HAS_extrh_i64_i32) {
1813 tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
1814 GET_TCGV_I32(ret), GET_TCGV_I64(arg));
951c6300
RH
1815 } else {
1816 TCGv_i64 t = tcg_temp_new_i64();
609ad705 1817 tcg_gen_shri_i64(t, arg, 32);
951c6300
RH
1818 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
1819 tcg_temp_free_i64(t);
1820 }
951c6300
RH
1821}
1822
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information. */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above. */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

/* QEMU specific operations. */

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits. */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

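/* Canonicalize a memop: byte accesses carry no endianness, the sign
   of a 32-bit value in a 32-bit container is not observable, and a
   store never cares about sign.  The alignment bits are sanity
   checked as early as possible. */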
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    /* Trigger the asserts within as early as possible. */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}

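/* Emit one qemu_ld/st opcode, splitting the value and/or the address
   into 32-bit register pairs as required by the host register width
   and the guest address width. */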
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}

static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}

void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}

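/* Sign- or zero-extend VAL according to the MO_SSIZE field of OPC.
   Used below to canonicalize the values produced by the atomic
   fallback paths. */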
static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i32(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i32(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i32(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i32(ret, val);
        break;
    default:
        tcg_gen_mov_i32(ret, val);
        break;
    }
}

static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i64(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i64(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i64(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i64(ret, val);
        break;
    case MO_SL:
        tcg_gen_ext32s_i64(ret, val);
        break;
    case MO_UL:
        tcg_gen_ext32u_i64(ret, val);
        break;
    default:
        tcg_gen_mov_i64(ret, val);
        break;
    }
}

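/* Signatures of the out-of-line atomic helpers; in the softmmu case
   each takes a trailing TCGMemOpIdx (as a TCGv_i32) encoding the
   memop and mmu index. */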
#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif

#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif

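/* Helper table indexed by MO_SIZE | MO_BSWAP; the 64-bit entries are
   present only when the host supports 64-bit atomics. */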
static void * const table_cmpxchg[16] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};

void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!parallel_cpus) {
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
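        /* t2 = (t1 == t2 ? newv : t1): store the new value on a match,
           otherwise store back the unchanged old value.  Without
           parallel_cpus no other vCPU can interleave with this
           load/store pair. */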
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        gen_atomic_cx_i32 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
#endif

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}

void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!parallel_cpus) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
            gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
#endif
#else
        gen_helper_exit_atomic(tcg_ctx.tcg_env);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}

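/* Non-parallel fallback for an atomic read-modify-write: plain load,
   apply GEN, plain store.  NEW selects whether the new value
   (op-then-fetch) or the old value (fetch-then-op) is returned. */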
static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
        gen(ret, tcg_ctx.tcg_env, addr, val, oi);
        tcg_temp_free_i32(oi);
    }
#else
    gen(ret, tcg_ctx.tcg_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}

static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(ret, tcg_ctx.tcg_env, addr, val, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(ret, tcg_ctx.tcg_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(tcg_ctx.tcg_env);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

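/* Instantiate the helper table plus both front ends for one
   read-modify-write operation: NAME is the helper suffix, OP the
   tcg_gen_* operation used by the non-parallel path, and NEW is as
   for do_nonatomic_op_*. */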
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}

GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)

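/* Exchange is expressed as a fetch-op whose operation ignores the old
   value and simply installs the new one; with NEW == 0 the old value
   is returned. */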
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER