/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
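/* Example: BITS(7, 4) = ((0xffffffffU << 24) >> 28) << 4 = 0xf0,
   i.e. a mask covering bits 7..4. */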

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
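/* An operand which needs more than one host register (a 64 bit value on
   a 32 bit host, or a guest address wider than a host register) simply
   lists its constraint letter once per register. */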
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif

/* Operand constraints for every opcode the interpreter accepts, one
   constraint string per output and input operand.  The table is
   registered below with tcg_add_target_add_op_defs(). */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(uint8_t *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *(tcg_target_long *)code_ptr = value;
}

/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    *(tcg_target_ulong *)s->code_ptr = v;
    s->code_ptr += sizeof(tcg_target_ulong);
}

/* Write 64 bit value. */
static void tcg_out64(TCGContext *s, uint64_t v)
{
    *(uint64_t *)s->code_ptr = v;
    s->code_ptr += sizeof(v);
}
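
/* Each TCI instruction starts with a one byte opcode and a one byte
   instruction size.  The size byte is written as 0 here and patched
   retroactively via old_code_ptr[1] once all operands have been
   emitted (see tcg_out_op() and the other emitters below). */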
/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}
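
/* A register-or-constant operand is encoded either as a single register
   byte, or as the marker byte TCG_CONST followed by an immediate of the
   indicated width. */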

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif
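
/* A label operand occupies a native word: a backward reference emits
   the already known absolute address, while a forward reference leaves
   a hole which tcg_out_reloc() registers and patch_reloc() fills once
   the label value becomes known. */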
/* Write label. */
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
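
/* Note: a constant which fits in 32 bits unsigned (arg == arg32 below)
   is encoded as movi_i32 even for TCG_TYPE_I64, which keeps the
   bytecode compact. */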

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
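
/* Emit one TCG opcode and its operands as TCI bytecode.  Register
   operands and condition codes take one byte each, constants a marker
   byte plus the immediate (tcg_out_ri*), labels a native word, and
   load/store offsets 32 bits.  An "add_i32 r2, r3, r4" therefore
   encodes into five bytes: opcode, size, and three register numbers. */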
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);    /* Jump offset, patched when TBs are chained. */
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);   /* position */
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);   /* length */
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        /* TODO: Implementation of rotl_i64, rotr_i64 missing in tci.c. */
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);   /* position */
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);   /* length */
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
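    /* For the qemu_ld/st ops the operands are: value register(s), guest
       address register(s) (two if the guest address space is wider than
       the host registers) and, with CONFIG_SOFTMMU, the memory index
       used for the TLB lookup. */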
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        assert(arg2 == (int32_t)arg2);  /* Offset must fit, as in tcg_out_ld(). */
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}
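
/* The interpreter needs no generated prologue or epilogue: with
   CONFIG_TCG_INTERPRETER, tcg_qemu_tb_exec is a C function (see tci.c)
   which dispatches the bytecode directly, so nothing is emitted here. */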
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}