/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
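
/* Worked example, for illustration: BITS(7, 4) expands to
   ((0xffffffffU << 24) >> 28) << 4 == 0xf0, i.e. a mask covering bits
   7..4 inclusive. Note that n and m appear unparenthesized in the
   expansion, so only plain constants are safe as arguments. */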

/* Macros used in tcg_target_op_defs. */
#define R "r"
#define RI "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64 "r", "r"
#else
# define R64 "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L "r", "r"
# define S "r", "r"
#else
# define L "r"
# define S "r"
#endif

/* TODO: documentation. */
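/* Briefly, for readers of this table: the constraint letters follow the
   usual TCG conventions. "r" accepts any register, "ri" a register or
   immediate, and "0" requires the input to alias output operand 0. L and
   S are the qemu_ld/qemu_st address constraints; they expand to two "r"
   entries when a guest address needs two host registers. */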
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
    { INDEX_op_andc_i32, { R, RI, RI } },
    { INDEX_op_eqv_i32, { R, RI, RI } },
    { INDEX_op_nand_i32, { R, RI, RI } },
    { INDEX_op_nor_i32, { R, RI, RI } },
    { INDEX_op_or_i32, { R, RI, RI } },
    { INDEX_op_orc_i32, { R, RI, RI } },
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
    { INDEX_op_deposit_i32, { R, "0", R } },

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
    { INDEX_op_setcond_i64, { R, R, RI } },

    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },

    { INDEX_op_not_i32, { R, R } },
    { INDEX_op_neg_i32, { R, R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
    { INDEX_op_and_i64, { R, RI, RI } },
    { INDEX_op_andc_i64, { R, RI, RI } },
    { INDEX_op_eqv_i64, { R, RI, RI } },
    { INDEX_op_nand_i64, { R, RI, RI } },
    { INDEX_op_nor_i64, { R, RI, RI } },
    { INDEX_op_or_i64, { R, RI, RI } },
    { INDEX_op_orc_i64, { R, RI, RI } },
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
    { INDEX_op_deposit_i64, { R, "0", R } },
    { INDEX_op_brcond_i64, { R, RI } },

    { INDEX_op_ext8s_i64, { R, R } },
    { INDEX_op_ext16s_i64, { R, R } },
    { INDEX_op_ext32s_i64, { R, R } },
    { INDEX_op_ext8u_i64, { R, R } },
    { INDEX_op_ext16u_i64, { R, R } },
    { INDEX_op_ext32u_i64, { R, R } },
    { INDEX_op_ext_i32_i64, { R, R } },
    { INDEX_op_extu_i32_i64, { R, R } },
    { INDEX_op_bswap16_i64, { R, R } },
    { INDEX_op_bswap32_i64, { R, R } },
    { INDEX_op_bswap64_i64, { R, R } },
    { INDEX_op_not_i64, { R, R } },
    { INDEX_op_neg_i64, { R, R } },

    { INDEX_op_qemu_ld_i32, { R, L } },
    { INDEX_op_qemu_ld_i64, { R64, L } },

    { INDEX_op_qemu_st_i32, { R, S } },
    { INDEX_op_qemu_st_i64, { R64, S } },

    { INDEX_op_ext8s_i32, { R, R } },
    { INDEX_op_ext16s_i32, { R, R } },
    { INDEX_op_ext8u_i32, { R, R } },
    { INDEX_op_ext16u_i32, { R, R } },

    { INDEX_op_bswap16_i32, { R, R } },
    { INDEX_op_bswap32_i32, { R, R } },

    { INDEX_op_mb, { } },
    { -1 },
};

static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op)
{
    int i, n = ARRAY_SIZE(tcg_target_op_defs);

    for (i = 0; i < n; ++i) {
        if (tcg_target_op_defs[i].op == op) {
            return &tcg_target_op_defs[i];
        }
    }
    return NULL;
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type and addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
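
/* Every TCI instruction thus starts with an opcode byte and a length byte.
   The length byte is written as 0 here and back-patched to the total
   instruction size once all operands have been emitted; see the
   "old_code_ptr[1] = s->code_ptr - old_code_ptr" assignments below. */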

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
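
/* For a forward reference the label value is not known yet, so a
   pointer-sized slot is reserved and a relocation is recorded;
   patch_reloc() above writes the absolute address into that slot once
   the label is resolved. */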

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_tci_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_tci_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
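
/* Note that a 64-bit constant equal to its zero-extended low 32 bits
   (arg == arg32) reuses the 32-bit encoding above, saving four bytes
   per emitted instruction. */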

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]); /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]); /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]); /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32: /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32: /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32: /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32: /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32: /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32: /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32: /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32: /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64: /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64: /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64: /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64: /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64: /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64: /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64: /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64: /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64: /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64: /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]); /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64: /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64: /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64: /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64: /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64: /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64: /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64: /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64: /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32: /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32: /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32: /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32: /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32: /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32: /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32: /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32: /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]); /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]); /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mb:
        break;
    case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call: /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
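
/* As an illustration of the resulting byte stream: an add_i32 with
   all-register operands is encoded in five bytes (opcode, length = 5,
   then three register indices), while each immediate operand is instead
   written as a TCG_CONST tag byte followed by the 32- or 64-bit value. */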

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}
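
/* TCI interprets its bytecode, so any constant value can be encoded
   verbatim; the constraint therefore needs no range check beyond the
   TCG_CT_CONST flag. */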

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}