]> git.proxmox.com Git - mirror_qemu.git/blame - tcg/tci/tcg-target.c.inc
tcg: Use tcg_constant_{i32,i64,vec} with gvec expanders
[mirror_qemu.git] / tcg / tci / tcg-target.c.inc
CommitLineData
7316329a
SW
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
25/* TODO list:
26 * - See TODO comments in code.
27 */
28
/* Marker for missing code: print location and abort the translator. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/*
 * Bitfield n...m (in a 32 bit value), inclusive, with n >= m >= 0.
 * Arguments are parenthesized so that compound expressions such as
 * BITS(a + b, m) expand correctly (the original expanded them unguarded
 * inside "31 - n", mis-associating any additive argument).
 */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))

/* Macros used in tcg_target_op_defs (constraint-letter shorthands). */
#define R       "r"   /* any register */
#define RI      "ri"  /* register or immediate */
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"   /* a 64-bit value occupies two 32-bit host regs */
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"   /* guest address needs two host regs */
# define S      "S", "S"
#else
# define L      "L"   /* qemu_ld address */
# define S      "S"   /* qemu_st address */
#endif

/*
 * Operand-constraint table for the bytecode "target": one entry per
 * supported TCGOpcode.  Strings name the allowed argument kinds:
 * "r" = register, "ri" = register or immediate, "0"/"1" = must alias
 * output 0/1, "L"/"S" = qemu_ld/qemu_st address.  Terminated by { -1 }.
 */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
    { INDEX_op_ext_i32_i64, { R, R } },
    { INDEX_op_extu_i32_i64, { R, R } },
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld_i32, { R, L } },
    { INDEX_op_qemu_ld_i64, { R64, L } },

    { INDEX_op_qemu_st_i32, { R, S } },
    { INDEX_op_qemu_st_i64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { INDEX_op_mb, { } },
    { -1 },  /* sentinel */
};
259
f69d277e
RH
260static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op)
261{
262 int i, n = ARRAY_SIZE(tcg_target_op_defs);
263
264 for (i = 0; i < n; ++i) {
265 if (tcg_target_op_defs[i].op == op) {
266 return &tcg_target_op_defs[i];
267 }
268 }
269 return NULL;
270}
271
/* Register allocation preference order.  R4 is reserved (call stack). */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

/* This table is sized for exactly 6 input arguments (12 regs on 32-bit). */
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

/* Registers used to pass helper-call input arguments. */
static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
#else
# error Too few input registers available
#endif
#endif
};

/* Registers used to return helper-call results (pair on 32-bit hosts). */
static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
/* Human-readable register names, for debug dumps only. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

/*
 * Patch a previously emitted relocation with the resolved address @value.
 * The tci backend emits only one relocation kind: a full native-word
 * absolute pointer (see tci_out_label), so @type/@addend are fixed.
 */
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

/* Parse target specific constraints: every letter maps to "any register". */
static const char *target_parse_constraint(TCGArgConstraint *ct,
                                           const char *ct_str, TCGType type)
{
    switch (*ct_str++) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->regs = BIT(TCG_TARGET_NB_REGS) - 1;  /* all registers allowed */
        break;
    default:
        return NULL;            /* unknown constraint letter */
    }
    return ct_str;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native word size) into the bytecode stream. */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/*
 * Write opcode: one byte for the op, one placeholder byte that callers
 * later overwrite with the total instruction length (old_code_ptr[1]).
 */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register number (single byte). */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size); TCG_CONST tags an immediate. */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit); TCG_CONST tags an immediate. */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit); TCG_CONST tags an immediate. */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/*
 * Write label: emit its address if already resolved, otherwise record a
 * relocation and leave a native-word gap for patch_reloc to fill later.
 */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);  /* skip the gap */
    }
}

/* Emit a load: ret = *(type *)(arg1 + arg2).  arg2 must fit in 32 bits. */
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();                 /* 64-bit load on 32-bit host: unimplemented */
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;  /* patch length byte */
}

/* Emit a register-to-register move; always succeeds for this backend. */
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;  /* patch length byte */
    return true;
}

/*
 * Emit a move-immediate.  Values that fit in 32 bits use the i32 form
 * even for TCG_TYPE_I64 (the interpreter widens them on load).
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();                 /* 64-bit immediate on 32-bit host */
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;  /* patch length byte */
}

/* Emit a call to host helper code at @arg (encoded as a constant). */
static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;  /* patch length byte */
}

/*
 * Emit one TCG op as interpreter bytecode: the opcode byte, a length
 * byte (patched at the end), then the operands in the order the
 * interpreter expects.  Unsupported combinations hit TODO()/tcg_abort().
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);    /* placeholder, patched when chaining TBs */
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        /* All loads/stores: reg, base reg, 32-bit signed offset. */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);   /* bit offset */
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);   /* field length */
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);   /* bit offset */
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);   /* field length */
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        /* Simple unary ops: dest reg, source reg. */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);      /* high half of guest address */
        }
        tcg_out_i(s, *args++);          /* TCGMemOpIdx */
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);      /* high half of result */
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);      /* high half of guest address */
        }
        tcg_out_i(s, *args++);          /* TCGMemOpIdx */
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);      /* high half of guest address */
        }
        tcg_out_i(s, *args++);          /* TCGMemOpIdx */
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);      /* high half of value */
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);      /* high half of guest address */
        }
        tcg_out_i(s, *args++);          /* TCGMemOpIdx */
        break;
    case INDEX_op_mb:
        /* No operands; the interpreter is single-threaded per vCPU. */
        break;
    case INDEX_op_mov_i32:      /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32:     /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:         /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;  /* patch length byte */
}

/* Emit a store: *(type *)(arg1 + arg2) = arg. */
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();                 /* 64-bit store on 32-bit host */
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;  /* patch length byte */
}

/* Store-immediate is not supported; returning false makes the caller
   materialize the constant in a register first. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint: any value is acceptable
   whenever the constraint allows a constant at all. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

/* One-time backend initialization: register sets, reserved regs, frame. */
static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code: nothing to do, the
   bytecode is entered directly by the interpreter. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}