]> git.proxmox.com Git - mirror_qemu.git/blame - tcg/tcg-op.c
ivshmem: Fix 64 bit memory bar configuration
[mirror_qemu.git] / tcg / tcg-op.c
CommitLineData
951c6300
RH
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
757e725b 25#include "qemu/osdep.h"
33c11879
PB
26#include "qemu-common.h"
27#include "cpu.h"
63c91552 28#include "exec/exec-all.h"
951c6300
RH
29#include "tcg.h"
30#include "tcg-op.h"
dcdaadb6
LV
31#include "trace-tcg.h"
32#include "trace/mem.h"
951c6300 33
3a13c3f3
RH
/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  Any surviving 64-bit use of the 32-bit
   half accessors produces an unresolved symbol at link time.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
951c6300 43
c45cb8bb
RH
44/* Note that this is optimized for sequential allocation during translate.
45 Up to and including filling in the forward link immediately. We'll do
46 proper termination of the end of the list after we finish translation. */
47
48static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
49{
50 int oi = ctx->gen_next_op_idx;
51 int ni = oi + 1;
52 int pi = oi - 1;
53
54 tcg_debug_assert(oi < OPC_BUF_SIZE);
dcb8e758 55 ctx->gen_op_buf[0].prev = oi;
c45cb8bb
RH
56 ctx->gen_next_op_idx = ni;
57
58 ctx->gen_op_buf[oi] = (TCGOp){
59 .opc = opc,
60 .args = args,
61 .prev = pi,
62 .next = ni
63 };
64}
65
951c6300
RH
66void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
67{
c45cb8bb 68 int pi = ctx->gen_next_parm_idx;
951c6300 69
c45cb8bb
RH
70 tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
71 ctx->gen_next_parm_idx = pi + 1;
72 ctx->gen_opparam_buf[pi] = a1;
951c6300 73
c45cb8bb 74 tcg_emit_op(ctx, opc, pi);
951c6300
RH
75}
76
77void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
78{
c45cb8bb 79 int pi = ctx->gen_next_parm_idx;
951c6300 80
c45cb8bb
RH
81 tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
82 ctx->gen_next_parm_idx = pi + 2;
83 ctx->gen_opparam_buf[pi + 0] = a1;
84 ctx->gen_opparam_buf[pi + 1] = a2;
951c6300 85
c45cb8bb 86 tcg_emit_op(ctx, opc, pi);
951c6300
RH
87}
88
89void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
90 TCGArg a2, TCGArg a3)
91{
c45cb8bb 92 int pi = ctx->gen_next_parm_idx;
951c6300 93
c45cb8bb
RH
94 tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
95 ctx->gen_next_parm_idx = pi + 3;
96 ctx->gen_opparam_buf[pi + 0] = a1;
97 ctx->gen_opparam_buf[pi + 1] = a2;
98 ctx->gen_opparam_buf[pi + 2] = a3;
951c6300 99
c45cb8bb 100 tcg_emit_op(ctx, opc, pi);
951c6300
RH
101}
102
103void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
104 TCGArg a2, TCGArg a3, TCGArg a4)
105{
c45cb8bb 106 int pi = ctx->gen_next_parm_idx;
951c6300 107
c45cb8bb
RH
108 tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
109 ctx->gen_next_parm_idx = pi + 4;
110 ctx->gen_opparam_buf[pi + 0] = a1;
111 ctx->gen_opparam_buf[pi + 1] = a2;
112 ctx->gen_opparam_buf[pi + 2] = a3;
113 ctx->gen_opparam_buf[pi + 3] = a4;
951c6300 114
c45cb8bb 115 tcg_emit_op(ctx, opc, pi);
951c6300
RH
116}
117
118void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
119 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
120{
c45cb8bb 121 int pi = ctx->gen_next_parm_idx;
951c6300 122
c45cb8bb
RH
123 tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
124 ctx->gen_next_parm_idx = pi + 5;
125 ctx->gen_opparam_buf[pi + 0] = a1;
126 ctx->gen_opparam_buf[pi + 1] = a2;
127 ctx->gen_opparam_buf[pi + 2] = a3;
128 ctx->gen_opparam_buf[pi + 3] = a4;
129 ctx->gen_opparam_buf[pi + 4] = a5;
951c6300 130
c45cb8bb 131 tcg_emit_op(ctx, opc, pi);
951c6300
RH
132}
133
134void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
135 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
136{
c45cb8bb 137 int pi = ctx->gen_next_parm_idx;
951c6300 138
c45cb8bb
RH
139 tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
140 ctx->gen_next_parm_idx = pi + 6;
141 ctx->gen_opparam_buf[pi + 0] = a1;
142 ctx->gen_opparam_buf[pi + 1] = a2;
143 ctx->gen_opparam_buf[pi + 2] = a3;
144 ctx->gen_opparam_buf[pi + 3] = a4;
145 ctx->gen_opparam_buf[pi + 4] = a5;
146 ctx->gen_opparam_buf[pi + 5] = a6;
951c6300 147
c45cb8bb 148 tcg_emit_op(ctx, opc, pi);
951c6300
RH
149}
150
151/* 32 bit ops */
152
153void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
154{
155 /* some cases can be optimized here */
156 if (arg2 == 0) {
157 tcg_gen_mov_i32(ret, arg1);
158 } else {
159 TCGv_i32 t0 = tcg_const_i32(arg2);
160 tcg_gen_add_i32(ret, arg1, t0);
161 tcg_temp_free_i32(t0);
162 }
163}
164
165void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
166{
167 if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
168 /* Don't recurse with tcg_gen_neg_i32. */
169 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
170 } else {
171 TCGv_i32 t0 = tcg_const_i32(arg1);
172 tcg_gen_sub_i32(ret, t0, arg2);
173 tcg_temp_free_i32(t0);
174 }
175}
176
177void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
178{
179 /* some cases can be optimized here */
180 if (arg2 == 0) {
181 tcg_gen_mov_i32(ret, arg1);
182 } else {
183 TCGv_i32 t0 = tcg_const_i32(arg2);
184 tcg_gen_sub_i32(ret, arg1, t0);
185 tcg_temp_free_i32(t0);
186 }
187}
188
189void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
190{
191 TCGv_i32 t0;
192 /* Some cases can be optimized here. */
193 switch (arg2) {
194 case 0:
195 tcg_gen_movi_i32(ret, 0);
196 return;
197 case 0xffffffffu:
198 tcg_gen_mov_i32(ret, arg1);
199 return;
200 case 0xffu:
201 /* Don't recurse with tcg_gen_ext8u_i32. */
202 if (TCG_TARGET_HAS_ext8u_i32) {
203 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
204 return;
205 }
206 break;
207 case 0xffffu:
208 if (TCG_TARGET_HAS_ext16u_i32) {
209 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
210 return;
211 }
212 break;
213 }
214 t0 = tcg_const_i32(arg2);
215 tcg_gen_and_i32(ret, arg1, t0);
216 tcg_temp_free_i32(t0);
217}
218
219void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
220{
221 /* Some cases can be optimized here. */
222 if (arg2 == -1) {
223 tcg_gen_movi_i32(ret, -1);
224 } else if (arg2 == 0) {
225 tcg_gen_mov_i32(ret, arg1);
226 } else {
227 TCGv_i32 t0 = tcg_const_i32(arg2);
228 tcg_gen_or_i32(ret, arg1, t0);
229 tcg_temp_free_i32(t0);
230 }
231}
232
233void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
234{
235 /* Some cases can be optimized here. */
236 if (arg2 == 0) {
237 tcg_gen_mov_i32(ret, arg1);
238 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
239 /* Don't recurse with tcg_gen_not_i32. */
240 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
241 } else {
242 TCGv_i32 t0 = tcg_const_i32(arg2);
243 tcg_gen_xor_i32(ret, arg1, t0);
244 tcg_temp_free_i32(t0);
245 }
246}
247
248void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
249{
250 tcg_debug_assert(arg2 < 32);
251 if (arg2 == 0) {
252 tcg_gen_mov_i32(ret, arg1);
253 } else {
254 TCGv_i32 t0 = tcg_const_i32(arg2);
255 tcg_gen_shl_i32(ret, arg1, t0);
256 tcg_temp_free_i32(t0);
257 }
258}
259
260void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
261{
262 tcg_debug_assert(arg2 < 32);
263 if (arg2 == 0) {
264 tcg_gen_mov_i32(ret, arg1);
265 } else {
266 TCGv_i32 t0 = tcg_const_i32(arg2);
267 tcg_gen_shr_i32(ret, arg1, t0);
268 tcg_temp_free_i32(t0);
269 }
270}
271
272void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
273{
274 tcg_debug_assert(arg2 < 32);
275 if (arg2 == 0) {
276 tcg_gen_mov_i32(ret, arg1);
277 } else {
278 TCGv_i32 t0 = tcg_const_i32(arg2);
279 tcg_gen_sar_i32(ret, arg1, t0);
280 tcg_temp_free_i32(t0);
281 }
282}
283
42a268c2 284void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
951c6300
RH
285{
286 if (cond == TCG_COND_ALWAYS) {
42a268c2 287 tcg_gen_br(l);
951c6300 288 } else if (cond != TCG_COND_NEVER) {
42a268c2 289 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
951c6300
RH
290 }
291}
292
42a268c2 293void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
951c6300 294{
37ed3bf1
RH
295 if (cond == TCG_COND_ALWAYS) {
296 tcg_gen_br(l);
297 } else if (cond != TCG_COND_NEVER) {
298 TCGv_i32 t0 = tcg_const_i32(arg2);
299 tcg_gen_brcond_i32(cond, arg1, t0, l);
300 tcg_temp_free_i32(t0);
301 }
951c6300
RH
302}
303
304void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
305 TCGv_i32 arg1, TCGv_i32 arg2)
306{
307 if (cond == TCG_COND_ALWAYS) {
308 tcg_gen_movi_i32(ret, 1);
309 } else if (cond == TCG_COND_NEVER) {
310 tcg_gen_movi_i32(ret, 0);
311 } else {
312 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
313 }
314}
315
316void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
317 TCGv_i32 arg1, int32_t arg2)
318{
319 TCGv_i32 t0 = tcg_const_i32(arg2);
320 tcg_gen_setcond_i32(cond, ret, arg1, t0);
321 tcg_temp_free_i32(t0);
322}
323
324void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
325{
326 TCGv_i32 t0 = tcg_const_i32(arg2);
327 tcg_gen_mul_i32(ret, arg1, t0);
328 tcg_temp_free_i32(t0);
329}
330
331void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
332{
333 if (TCG_TARGET_HAS_div_i32) {
334 tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
335 } else if (TCG_TARGET_HAS_div2_i32) {
336 TCGv_i32 t0 = tcg_temp_new_i32();
337 tcg_gen_sari_i32(t0, arg1, 31);
338 tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
339 tcg_temp_free_i32(t0);
340 } else {
341 gen_helper_div_i32(ret, arg1, arg2);
342 }
343}
344
345void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
346{
347 if (TCG_TARGET_HAS_rem_i32) {
348 tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
349 } else if (TCG_TARGET_HAS_div_i32) {
350 TCGv_i32 t0 = tcg_temp_new_i32();
351 tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
352 tcg_gen_mul_i32(t0, t0, arg2);
353 tcg_gen_sub_i32(ret, arg1, t0);
354 tcg_temp_free_i32(t0);
355 } else if (TCG_TARGET_HAS_div2_i32) {
356 TCGv_i32 t0 = tcg_temp_new_i32();
357 tcg_gen_sari_i32(t0, arg1, 31);
358 tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
359 tcg_temp_free_i32(t0);
360 } else {
361 gen_helper_rem_i32(ret, arg1, arg2);
362 }
363}
364
365void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
366{
367 if (TCG_TARGET_HAS_div_i32) {
368 tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
369 } else if (TCG_TARGET_HAS_div2_i32) {
370 TCGv_i32 t0 = tcg_temp_new_i32();
371 tcg_gen_movi_i32(t0, 0);
372 tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
373 tcg_temp_free_i32(t0);
374 } else {
375 gen_helper_divu_i32(ret, arg1, arg2);
376 }
377}
378
379void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
380{
381 if (TCG_TARGET_HAS_rem_i32) {
382 tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
383 } else if (TCG_TARGET_HAS_div_i32) {
384 TCGv_i32 t0 = tcg_temp_new_i32();
385 tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
386 tcg_gen_mul_i32(t0, t0, arg2);
387 tcg_gen_sub_i32(ret, arg1, t0);
388 tcg_temp_free_i32(t0);
389 } else if (TCG_TARGET_HAS_div2_i32) {
390 TCGv_i32 t0 = tcg_temp_new_i32();
391 tcg_gen_movi_i32(t0, 0);
392 tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
393 tcg_temp_free_i32(t0);
394 } else {
395 gen_helper_remu_i32(ret, arg1, arg2);
396 }
397}
398
399void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
400{
401 if (TCG_TARGET_HAS_andc_i32) {
402 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
403 } else {
404 TCGv_i32 t0 = tcg_temp_new_i32();
405 tcg_gen_not_i32(t0, arg2);
406 tcg_gen_and_i32(ret, arg1, t0);
407 tcg_temp_free_i32(t0);
408 }
409}
410
411void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
412{
413 if (TCG_TARGET_HAS_eqv_i32) {
414 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
415 } else {
416 tcg_gen_xor_i32(ret, arg1, arg2);
417 tcg_gen_not_i32(ret, ret);
418 }
419}
420
421void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
422{
423 if (TCG_TARGET_HAS_nand_i32) {
424 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
425 } else {
426 tcg_gen_and_i32(ret, arg1, arg2);
427 tcg_gen_not_i32(ret, ret);
428 }
429}
430
431void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
432{
433 if (TCG_TARGET_HAS_nor_i32) {
434 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
435 } else {
436 tcg_gen_or_i32(ret, arg1, arg2);
437 tcg_gen_not_i32(ret, ret);
438 }
439}
440
441void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
442{
443 if (TCG_TARGET_HAS_orc_i32) {
444 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
445 } else {
446 TCGv_i32 t0 = tcg_temp_new_i32();
447 tcg_gen_not_i32(t0, arg2);
448 tcg_gen_or_i32(ret, arg1, t0);
449 tcg_temp_free_i32(t0);
450 }
451}
452
453void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
454{
455 if (TCG_TARGET_HAS_rot_i32) {
456 tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
457 } else {
458 TCGv_i32 t0, t1;
459
460 t0 = tcg_temp_new_i32();
461 t1 = tcg_temp_new_i32();
462 tcg_gen_shl_i32(t0, arg1, arg2);
463 tcg_gen_subfi_i32(t1, 32, arg2);
464 tcg_gen_shr_i32(t1, arg1, t1);
465 tcg_gen_or_i32(ret, t0, t1);
466 tcg_temp_free_i32(t0);
467 tcg_temp_free_i32(t1);
468 }
469}
470
471void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
472{
473 tcg_debug_assert(arg2 < 32);
474 /* some cases can be optimized here */
475 if (arg2 == 0) {
476 tcg_gen_mov_i32(ret, arg1);
477 } else if (TCG_TARGET_HAS_rot_i32) {
478 TCGv_i32 t0 = tcg_const_i32(arg2);
479 tcg_gen_rotl_i32(ret, arg1, t0);
480 tcg_temp_free_i32(t0);
481 } else {
482 TCGv_i32 t0, t1;
483 t0 = tcg_temp_new_i32();
484 t1 = tcg_temp_new_i32();
485 tcg_gen_shli_i32(t0, arg1, arg2);
486 tcg_gen_shri_i32(t1, arg1, 32 - arg2);
487 tcg_gen_or_i32(ret, t0, t1);
488 tcg_temp_free_i32(t0);
489 tcg_temp_free_i32(t1);
490 }
491}
492
493void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
494{
495 if (TCG_TARGET_HAS_rot_i32) {
496 tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
497 } else {
498 TCGv_i32 t0, t1;
499
500 t0 = tcg_temp_new_i32();
501 t1 = tcg_temp_new_i32();
502 tcg_gen_shr_i32(t0, arg1, arg2);
503 tcg_gen_subfi_i32(t1, 32, arg2);
504 tcg_gen_shl_i32(t1, arg1, t1);
505 tcg_gen_or_i32(ret, t0, t1);
506 tcg_temp_free_i32(t0);
507 tcg_temp_free_i32(t1);
508 }
509}
510
511void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
512{
513 tcg_debug_assert(arg2 < 32);
514 /* some cases can be optimized here */
515 if (arg2 == 0) {
516 tcg_gen_mov_i32(ret, arg1);
517 } else {
518 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
519 }
520}
521
522void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
523 unsigned int ofs, unsigned int len)
524{
525 uint32_t mask;
526 TCGv_i32 t1;
527
528 tcg_debug_assert(ofs < 32);
529 tcg_debug_assert(len <= 32);
530 tcg_debug_assert(ofs + len <= 32);
531
532 if (ofs == 0 && len == 32) {
533 tcg_gen_mov_i32(ret, arg2);
534 return;
535 }
536 if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
537 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
538 return;
539 }
540
541 mask = (1u << len) - 1;
542 t1 = tcg_temp_new_i32();
543
544 if (ofs + len < 32) {
545 tcg_gen_andi_i32(t1, arg2, mask);
546 tcg_gen_shli_i32(t1, t1, ofs);
547 } else {
548 tcg_gen_shli_i32(t1, arg2, ofs);
549 }
550 tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
551 tcg_gen_or_i32(ret, ret, t1);
552
553 tcg_temp_free_i32(t1);
554}
555
556void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
557 TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
558{
37ed3bf1
RH
559 if (cond == TCG_COND_ALWAYS) {
560 tcg_gen_mov_i32(ret, v1);
561 } else if (cond == TCG_COND_NEVER) {
562 tcg_gen_mov_i32(ret, v2);
563 } else if (TCG_TARGET_HAS_movcond_i32) {
951c6300
RH
564 tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
565 } else {
566 TCGv_i32 t0 = tcg_temp_new_i32();
567 TCGv_i32 t1 = tcg_temp_new_i32();
568 tcg_gen_setcond_i32(cond, t0, c1, c2);
569 tcg_gen_neg_i32(t0, t0);
570 tcg_gen_and_i32(t1, v1, t0);
571 tcg_gen_andc_i32(ret, v2, t0);
572 tcg_gen_or_i32(ret, ret, t1);
573 tcg_temp_free_i32(t0);
574 tcg_temp_free_i32(t1);
575 }
576}
577
578void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
579 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
580{
581 if (TCG_TARGET_HAS_add2_i32) {
582 tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
951c6300
RH
583 } else {
584 TCGv_i64 t0 = tcg_temp_new_i64();
585 TCGv_i64 t1 = tcg_temp_new_i64();
586 tcg_gen_concat_i32_i64(t0, al, ah);
587 tcg_gen_concat_i32_i64(t1, bl, bh);
588 tcg_gen_add_i64(t0, t0, t1);
589 tcg_gen_extr_i64_i32(rl, rh, t0);
590 tcg_temp_free_i64(t0);
591 tcg_temp_free_i64(t1);
592 }
593}
594
595void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
596 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
597{
598 if (TCG_TARGET_HAS_sub2_i32) {
599 tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
951c6300
RH
600 } else {
601 TCGv_i64 t0 = tcg_temp_new_i64();
602 TCGv_i64 t1 = tcg_temp_new_i64();
603 tcg_gen_concat_i32_i64(t0, al, ah);
604 tcg_gen_concat_i32_i64(t1, bl, bh);
605 tcg_gen_sub_i64(t0, t0, t1);
606 tcg_gen_extr_i64_i32(rl, rh, t0);
607 tcg_temp_free_i64(t0);
608 tcg_temp_free_i64(t1);
609 }
610}
611
612void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
613{
614 if (TCG_TARGET_HAS_mulu2_i32) {
615 tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
951c6300
RH
616 } else if (TCG_TARGET_HAS_muluh_i32) {
617 TCGv_i32 t = tcg_temp_new_i32();
618 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
619 tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
620 tcg_gen_mov_i32(rl, t);
621 tcg_temp_free_i32(t);
622 } else {
623 TCGv_i64 t0 = tcg_temp_new_i64();
624 TCGv_i64 t1 = tcg_temp_new_i64();
625 tcg_gen_extu_i32_i64(t0, arg1);
626 tcg_gen_extu_i32_i64(t1, arg2);
627 tcg_gen_mul_i64(t0, t0, t1);
628 tcg_gen_extr_i64_i32(rl, rh, t0);
629 tcg_temp_free_i64(t0);
630 tcg_temp_free_i64(t1);
631 }
632}
633
634void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
635{
636 if (TCG_TARGET_HAS_muls2_i32) {
637 tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
951c6300
RH
638 } else if (TCG_TARGET_HAS_mulsh_i32) {
639 TCGv_i32 t = tcg_temp_new_i32();
640 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
641 tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
642 tcg_gen_mov_i32(rl, t);
643 tcg_temp_free_i32(t);
644 } else if (TCG_TARGET_REG_BITS == 32) {
645 TCGv_i32 t0 = tcg_temp_new_i32();
646 TCGv_i32 t1 = tcg_temp_new_i32();
647 TCGv_i32 t2 = tcg_temp_new_i32();
648 TCGv_i32 t3 = tcg_temp_new_i32();
649 tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
650 /* Adjust for negative inputs. */
651 tcg_gen_sari_i32(t2, arg1, 31);
652 tcg_gen_sari_i32(t3, arg2, 31);
653 tcg_gen_and_i32(t2, t2, arg2);
654 tcg_gen_and_i32(t3, t3, arg1);
655 tcg_gen_sub_i32(rh, t1, t2);
656 tcg_gen_sub_i32(rh, rh, t3);
657 tcg_gen_mov_i32(rl, t0);
658 tcg_temp_free_i32(t0);
659 tcg_temp_free_i32(t1);
660 tcg_temp_free_i32(t2);
661 tcg_temp_free_i32(t3);
662 } else {
663 TCGv_i64 t0 = tcg_temp_new_i64();
664 TCGv_i64 t1 = tcg_temp_new_i64();
665 tcg_gen_ext_i32_i64(t0, arg1);
666 tcg_gen_ext_i32_i64(t1, arg2);
667 tcg_gen_mul_i64(t0, t0, t1);
668 tcg_gen_extr_i64_i32(rl, rh, t0);
669 tcg_temp_free_i64(t0);
670 tcg_temp_free_i64(t1);
671 }
672}
673
674void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
675{
676 if (TCG_TARGET_HAS_ext8s_i32) {
677 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
678 } else {
679 tcg_gen_shli_i32(ret, arg, 24);
680 tcg_gen_sari_i32(ret, ret, 24);
681 }
682}
683
684void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
685{
686 if (TCG_TARGET_HAS_ext16s_i32) {
687 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
688 } else {
689 tcg_gen_shli_i32(ret, arg, 16);
690 tcg_gen_sari_i32(ret, ret, 16);
691 }
692}
693
694void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
695{
696 if (TCG_TARGET_HAS_ext8u_i32) {
697 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
698 } else {
699 tcg_gen_andi_i32(ret, arg, 0xffu);
700 }
701}
702
703void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
704{
705 if (TCG_TARGET_HAS_ext16u_i32) {
706 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
707 } else {
708 tcg_gen_andi_i32(ret, arg, 0xffffu);
709 }
710}
711
712/* Note: we assume the two high bytes are set to zero */
713void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
714{
715 if (TCG_TARGET_HAS_bswap16_i32) {
716 tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
717 } else {
718 TCGv_i32 t0 = tcg_temp_new_i32();
719
720 tcg_gen_ext8u_i32(t0, arg);
721 tcg_gen_shli_i32(t0, t0, 8);
722 tcg_gen_shri_i32(ret, arg, 8);
723 tcg_gen_or_i32(ret, ret, t0);
724 tcg_temp_free_i32(t0);
725 }
726}
727
728void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
729{
730 if (TCG_TARGET_HAS_bswap32_i32) {
731 tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
732 } else {
733 TCGv_i32 t0, t1;
734 t0 = tcg_temp_new_i32();
735 t1 = tcg_temp_new_i32();
736
737 tcg_gen_shli_i32(t0, arg, 24);
738
739 tcg_gen_andi_i32(t1, arg, 0x0000ff00);
740 tcg_gen_shli_i32(t1, t1, 8);
741 tcg_gen_or_i32(t0, t0, t1);
742
743 tcg_gen_shri_i32(t1, arg, 8);
744 tcg_gen_andi_i32(t1, t1, 0x0000ff00);
745 tcg_gen_or_i32(t0, t0, t1);
746
747 tcg_gen_shri_i32(t1, arg, 24);
748 tcg_gen_or_i32(ret, t0, t1);
749 tcg_temp_free_i32(t0);
750 tcg_temp_free_i32(t1);
751 }
752}
753
754/* 64-bit ops */
755
756#if TCG_TARGET_REG_BITS == 32
757/* These are all inline for TCG_TARGET_REG_BITS == 64. */
758
759void tcg_gen_discard_i64(TCGv_i64 arg)
760{
761 tcg_gen_discard_i32(TCGV_LOW(arg));
762 tcg_gen_discard_i32(TCGV_HIGH(arg));
763}
764
765void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
766{
767 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
768 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
769}
770
771void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
772{
773 tcg_gen_movi_i32(TCGV_LOW(ret), arg);
774 tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
775}
776
777void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
778{
779 tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
780 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
781}
782
783void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
784{
785 tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
786 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
787}
788
789void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
790{
791 tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
792 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
793}
794
795void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
796{
797 tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
798 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
799}
800
801void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
802{
803 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
804 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
805}
806
807void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
808{
809 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
810 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
811}
812
813void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
814{
815 /* Since arg2 and ret have different types,
816 they cannot be the same temporary */
cf811fff 817#ifdef HOST_WORDS_BIGENDIAN
951c6300
RH
818 tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
819 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
820#else
821 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
822 tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
823#endif
824}
825
826void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
827{
cf811fff 828#ifdef HOST_WORDS_BIGENDIAN
951c6300
RH
829 tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
830 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
831#else
832 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
833 tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
834#endif
835}
836
837void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
838{
839 tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
840 tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
841}
842
843void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
844{
845 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
846 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
847}
848
849void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
850{
851 tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
852 tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
853}
854
855void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
856{
857 gen_helper_shl_i64(ret, arg1, arg2);
858}
859
860void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
861{
862 gen_helper_shr_i64(ret, arg1, arg2);
863}
864
865void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
866{
867 gen_helper_sar_i64(ret, arg1, arg2);
868}
869
870void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
871{
872 TCGv_i64 t0;
873 TCGv_i32 t1;
874
875 t0 = tcg_temp_new_i64();
876 t1 = tcg_temp_new_i32();
877
878 tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
879 TCGV_LOW(arg1), TCGV_LOW(arg2));
880
881 tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
882 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
883 tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
884 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
885
886 tcg_gen_mov_i64(ret, t0);
887 tcg_temp_free_i64(t0);
888 tcg_temp_free_i32(t1);
889}
890#endif /* TCG_TARGET_REG_SIZE == 32 */
891
892void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
893{
894 /* some cases can be optimized here */
895 if (arg2 == 0) {
896 tcg_gen_mov_i64(ret, arg1);
897 } else {
898 TCGv_i64 t0 = tcg_const_i64(arg2);
899 tcg_gen_add_i64(ret, arg1, t0);
900 tcg_temp_free_i64(t0);
901 }
902}
903
904void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
905{
906 if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
907 /* Don't recurse with tcg_gen_neg_i64. */
908 tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
909 } else {
910 TCGv_i64 t0 = tcg_const_i64(arg1);
911 tcg_gen_sub_i64(ret, t0, arg2);
912 tcg_temp_free_i64(t0);
913 }
914}
915
916void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
917{
918 /* some cases can be optimized here */
919 if (arg2 == 0) {
920 tcg_gen_mov_i64(ret, arg1);
921 } else {
922 TCGv_i64 t0 = tcg_const_i64(arg2);
923 tcg_gen_sub_i64(ret, arg1, t0);
924 tcg_temp_free_i64(t0);
925 }
926}
927
928void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
929{
951c6300 930 TCGv_i64 t0;
3a13c3f3
RH
931
932 if (TCG_TARGET_REG_BITS == 32) {
933 tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
934 tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
935 return;
936 }
937
951c6300
RH
938 /* Some cases can be optimized here. */
939 switch (arg2) {
940 case 0:
941 tcg_gen_movi_i64(ret, 0);
942 return;
943 case 0xffffffffffffffffull:
944 tcg_gen_mov_i64(ret, arg1);
945 return;
946 case 0xffull:
947 /* Don't recurse with tcg_gen_ext8u_i64. */
948 if (TCG_TARGET_HAS_ext8u_i64) {
949 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
950 return;
951 }
952 break;
953 case 0xffffu:
954 if (TCG_TARGET_HAS_ext16u_i64) {
955 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
956 return;
957 }
958 break;
959 case 0xffffffffull:
960 if (TCG_TARGET_HAS_ext32u_i64) {
961 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
962 return;
963 }
964 break;
965 }
966 t0 = tcg_const_i64(arg2);
967 tcg_gen_and_i64(ret, arg1, t0);
968 tcg_temp_free_i64(t0);
951c6300
RH
969}
970
971void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
972{
3a13c3f3
RH
973 if (TCG_TARGET_REG_BITS == 32) {
974 tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
975 tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
976 return;
977 }
951c6300
RH
978 /* Some cases can be optimized here. */
979 if (arg2 == -1) {
980 tcg_gen_movi_i64(ret, -1);
981 } else if (arg2 == 0) {
982 tcg_gen_mov_i64(ret, arg1);
983 } else {
984 TCGv_i64 t0 = tcg_const_i64(arg2);
985 tcg_gen_or_i64(ret, arg1, t0);
986 tcg_temp_free_i64(t0);
987 }
951c6300
RH
988}
989
990void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
991{
3a13c3f3
RH
992 if (TCG_TARGET_REG_BITS == 32) {
993 tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
994 tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
995 return;
996 }
951c6300
RH
997 /* Some cases can be optimized here. */
998 if (arg2 == 0) {
999 tcg_gen_mov_i64(ret, arg1);
1000 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1001 /* Don't recurse with tcg_gen_not_i64. */
1002 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1003 } else {
1004 TCGv_i64 t0 = tcg_const_i64(arg2);
1005 tcg_gen_xor_i64(ret, arg1, t0);
1006 tcg_temp_free_i64(t0);
1007 }
951c6300
RH
1008}
1009
951c6300
RH
1010static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
1011 unsigned c, bool right, bool arith)
1012{
1013 tcg_debug_assert(c < 64);
1014 if (c == 0) {
1015 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1016 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1017 } else if (c >= 32) {
1018 c -= 32;
1019 if (right) {
1020 if (arith) {
1021 tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
1022 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
1023 } else {
1024 tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
1025 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1026 }
1027 } else {
1028 tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
1029 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
1030 }
1031 } else {
1032 TCGv_i32 t0, t1;
1033
1034 t0 = tcg_temp_new_i32();
1035 t1 = tcg_temp_new_i32();
1036 if (right) {
1037 tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
1038 if (arith) {
1039 tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
1040 } else {
1041 tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
1042 }
1043 tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
1044 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
1045 tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
1046 } else {
1047 tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
1048 /* Note: ret can be the same as arg1, so we use t1 */
1049 tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
1050 tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
1051 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
1052 tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1053 }
1054 tcg_temp_free_i32(t0);
1055 tcg_temp_free_i32(t1);
1056 }
1057}
1058
951c6300
RH
1059void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1060{
1061 tcg_debug_assert(arg2 < 64);
3a13c3f3
RH
1062 if (TCG_TARGET_REG_BITS == 32) {
1063 tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
1064 } else if (arg2 == 0) {
951c6300
RH
1065 tcg_gen_mov_i64(ret, arg1);
1066 } else {
1067 TCGv_i64 t0 = tcg_const_i64(arg2);
1068 tcg_gen_shl_i64(ret, arg1, t0);
1069 tcg_temp_free_i64(t0);
1070 }
1071}
1072
/* ret = arg1 >> arg2 (64-bit logical), with a constant count 0 <= arg2 < 64. */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: expand into 32-bit ops (right = 1, arith = 0). */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1086
/* ret = arg1 >> arg2 (64-bit arithmetic), with a constant count 0 <= arg2 < 64. */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: expand into 32-bit ops (right = 1, arith = 1). */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
951c6300 1100
/* Branch to label L if "arg1 COND arg2" holds (64-bit comparison).
 * ALWAYS degenerates to an unconditional branch; NEVER emits nothing. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare both halves with brcond2. */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1116
/* Branch to label L if "arg1 COND arg2" holds, with an immediate arg2. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* Materialize the constant and reuse the register form. */
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}
1127
/* ret = (arg1 COND arg2) ? 1 : 0 (64-bit comparison, 64-bit result). */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 produces a 32-bit 0/1; clear the high half. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1146
/* ret = (arg1 COND arg2) ? 1 : 0, with an immediate arg2. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
1154
/* ret = arg1 * arg2 (64-bit), with an immediate multiplier. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
1161
/* ret = arg1 / arg2 (signed 64-bit division), using whichever form of
 * division the TCG target supports, else a helper call. */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a 128-bit dividend: sign-extend arg1 into the high part. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1175
/* ret = arg1 % arg2 (signed 64-bit remainder). */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 yields quotient and remainder; the remainder lands in ret. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1195
/* ret = arg1 / arg2 (unsigned 64-bit division). */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 takes a 128-bit dividend: zero the high part. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1209
/* ret = arg1 % arg2 (unsigned 64-bit remainder). */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 yields quotient and remainder; the remainder lands in ret. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1229
/* Sign-extend the low 8 bits of arg into a 64-bit value. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Fallback: shift the byte to the top, then arithmetic-shift back. */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1242
/* Sign-extend the low 16 bits of arg into a 64-bit value. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Fallback: shift the halfword to the top, then arithmetic-shift back. */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1255
/* Sign-extend the low 32 bits of arg into a 64-bit value. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Fallback: shift the word to the top, then arithmetic-shift back. */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1268
/* Zero-extend the low 8 bits of arg into a 64-bit value. */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1280
/* Zero-extend the low 16 bits of arg into a 64-bit value. */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1292
/* Zero-extend the low 32 bits of arg into a 64-bit value. */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1304
/* Byte-swap the low 16 bits of arg.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        /* Fallback: swap the two low bytes by shift-and-or.  The shri of
           the full arg relies on the high bytes being zero (see above). */
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1323
/* Byte-swap the low 32 bits of arg.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Fallback: assemble the swapped word byte by byte, accumulating
           each repositioned byte into t0. */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* byte 0 -> byte 3 (ext32u discards bits shifted above bit 31) */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* byte 1 -> byte 2 */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 2 -> byte 1 */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* byte 3 -> byte 0 (relies on the zero high bytes; see above) */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1354
/* Byte-swap all 64 bits of arg. */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Fallback: move each of the 8 bytes to its mirrored position,
           accumulating the result in t0. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);      /* byte 0 -> byte 7 */

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);       /* byte 1 -> byte 6 */
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);       /* byte 2 -> byte 5 */
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);        /* byte 3 -> byte 4 */
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);   /* byte 4 -> byte 3 */
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);   /* byte 5 -> byte 2 */
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);   /* byte 6 -> byte 1 */
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);      /* byte 7 -> byte 0 */
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1406
/* ret = ~arg (64-bit bitwise NOT). */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* ~x == x ^ -1 */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1418
/* ret = arg1 & ~arg2 (64-bit and-with-complement). */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1433
/* ret = ~(arg1 ^ arg2) (64-bit equivalence / XNOR). */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1446
/* ret = ~(arg1 & arg2) (64-bit NAND). */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1459
/* ret = ~(arg1 | arg2) (64-bit NOR). */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1472
/* ret = arg1 | ~arg2 (64-bit or-with-complement). */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1487
/* ret = arg1 rotated left by arg2 bits (64-bit). */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1504
/* ret = arg1 rotated left by the constant arg2 (0 <= arg2 < 64). */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Fallback: (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1526
/* ret = arg1 rotated right by arg2 bits (64-bit). */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 >> arg2) | (arg1 << (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1543
/* ret = arg1 rotated right by the constant arg2 (0 <= arg2 < 64),
 * implemented as a left rotation by the complementary count. */
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
1554
/* Deposit the low LEN bits of arg2 into arg1 at bit offset OFS:
 * ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs). */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        /* Full-width deposit is just a move (also avoids 1ull << 64 below). */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies wholly within one 32-bit half, deposit there. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
        /* Field straddles the halves: fall through to the generic code. */
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches bit 63: the shift itself discards the high bits. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1603
/* ret = (c1 COND c2) ? v1 : v2 (64-bit conditional move). */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: evaluate the 64-bit comparison into t0 (0 or 1). */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half against t0 != 0. */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* No movcond: turn t0 into an all-ones/all-zeros mask and
               blend: ret = (v1 & mask) | (v2 & ~mask), half by half. */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Fallback mask-and-blend, as above but in 64-bit ops. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1651
/* 128-bit add: (rh:rl) = (ah:al) + (bh:bl). */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: compute the low sum, derive carry-out via an unsigned
           compare (t0 < al iff the add wrapped), then add into the high part.
           The low result is staged in t0 so rl may alias the inputs. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1669
/* 128-bit subtract: (rh:rl) = (ah:al) - (bh:bl). */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: borrow-out is (al < bl) unsigned; subtract it from the
           high part.  The low result is staged in t0 so rl may alias. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1687
/* Unsigned 64x64->128 multiply: (rh:rl) = arg1 * arg2. */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low/high multiplies; stage the low product so rl may
           alias the inputs of the high multiply. */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* Helper computes the high part; low part is a plain multiply. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1706
/* Signed 64x64->128 multiply: (rh:rl) = arg1 * arg2. */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one:
           subtract argN from the high part when the other operand
           is negative (standard unsigned->signed mulh correction). */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1743
1744/* Size changing operations. */
1745
/* Extract the low 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* 64-bit host without the op: an i32 temp shares the i64 register,
           so reinterpret the i64 index as an i32 and move. */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1757
/* Extract the high 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Fallback: shift the high half down, then reinterpret the i64
           temp index as an i32 for the final move. */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1772
/* Zero-extend a 32-bit value into a 64-bit temp. */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1783
/* Sign-extend a 32-bit value into a 64-bit temp. */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1794
/* dest = (high << 32) | low: build a 64-bit value from two 32-bit halves. */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves map directly onto the 32-bit register pair. */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information. */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above. */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1820
/* Split a 64-bit value into its low (lo) and high (hi) 32-bit halves. */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
1831
/* Split a 64-bit value into two 64-bit temps holding its zero-extended
 * low and high 32-bit halves. */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1837
1838/* QEMU specific operations. */
1839
/* Emit a goto_tb opcode for chained-TB exit slot IDX (0 or 1). */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits. */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1851
/* Canonicalize a memory-op descriptor: drop byte-swap on byte accesses,
 * drop sign-extension where it is meaningless (32-bit result of a 32-bit
 * load, or any store), and reject 64-bit access on 32-bit values. */
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    /* Trigger the asserts within as early as possible. */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        /* A single byte has no byte order. */
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            /* 32-bit value: extension of a 32-bit load is a no-op. */
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            /* A 64-bit access cannot target a 32-bit value. */
            tcg_abort();
        }
        break;
    }
    if (st) {
        /* Stores never extend. */
        op &= ~MO_SIGN;
    }
    return op;
}
1879
c45cb8bb
RH
/* Emit a guest load/store opcode with a 32-bit data value, packing memop
 * and mmu index into a single TCGMemOpIdx operand.  The operand count
 * depends on how many host registers the guest address occupies. */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address split across two 32-bit host registers. */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1894
c45cb8bb
RH
/* Emit a guest load/store opcode with a 64-bit data value; as with
 * gen_ldst_i32, the operand count depends on how both the value and the
 * guest address map onto host registers. */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit value split across two 32-bit host registers. */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both value and address split across 32-bit register pairs. */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
951c6300
RH
1914
/* Load a 32-bit value from guest memory at ADDR (mmu index IDX), with a
 * tracing hook emitted before the access. */
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}
1922
/* Store a 32-bit value to guest memory at ADDR (mmu index IDX), with a
 * tracing hook emitted before the access. */
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}
1930
/* Load a 64-bit value from guest memory at ADDR (mmu index IDX). */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit access on a 32-bit host: do a 32-bit load into the
           low half and sign- or zero-extend into the high half. */
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}
1948
/* Store a 64-bit value to guest memory at ADDR (mmu index IDX). */
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit store on a 32-bit host: only the low half is written. */
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}