/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Single bit n. */
#define BIT(n) (1 << (n))

/* Bitfield from bit n down to bit m, inclusive (in a 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
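/* For illustration: BITS(7, 4) expands to 0x000000f0 and BITS(31, 0)
   to 0xffffffff. */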

/* Macros used in tcg_target_op_defs. */
#define R "r"
#define RI "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64 "r", "r"
#else
# define R64 "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L "L", "L"
# define S "S", "S"
#else
# define L "L"
# define S "S"
#endif
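/* When a guest virtual address does not fit in one host register
   (TARGET_LONG_BITS > TCG_TARGET_REG_BITS), qemu_ld/st addresses occupy two
   registers, so the L and S constraints are doubled accordingly. */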

/* TODO: documentation. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

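    /* End-of-table marker for tcg_add_target_add_op_defs(). */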
    { -1 },
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *(tcg_target_long *)code_ptr = value;
}

/* Parse target specific constraints. */
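/* The interpreter places no special requirements on qemu_ld/st operands, so
   the 'L' and 'S' constraints below accept any register, exactly like 'r'. */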
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show the current bytecode. Used by the TCG interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    *(tcg_target_ulong *)s->code_ptr = v;
    s->code_ptr += sizeof(tcg_target_ulong);
}

/* Write 64 bit value. */
static void tcg_out64(TCGContext *s, uint64_t v)
{
    *(uint64_t *)s->code_ptr = v;
    s->code_ptr += sizeof(v);
}

/* Write opcode. */
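/* Each TCI instruction starts with an opcode byte followed by a size byte.
   The size byte is emitted as 0 here and patched to the final instruction
   length afterwards (the old_code_ptr[1] assignments below). */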
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
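        /* TCG_CONST is a reserved register number that tells the
           interpreter an immediate operand follows. */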
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
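/* A label that is already bound emits its absolute host address directly;
   an unbound (forward) label emits a relocation that patch_reloc resolves
   once the label is set. */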
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        assert(label->u.value);
        tcg_out_i(s, label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
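    /* 32 bit values, and 64 bit values that zero extend from 32 bits, use
       the shorter movi_i32 encoding. */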
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
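            /* Record the offset of the 32 bit placeholder so that the jump
               target can be patched when TBs are chained. */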
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 t0, t1_low, t1_high, t2_low, t2_high, cond */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    /* TODO: Implementation of rotl_i64, rotr_i64 missing in tci.c. */
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
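    /* The qemu_ld/st opcodes emit a variable number of operands: the data
       value (one or two registers), the guest address (one or two registers),
       and, with CONFIG_SOFTMMU, the memory index. */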
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Test if a constant matches the constraint. */
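/* Any constant matches: immediates are emitted at the operand's full width,
   so there is no range restriction to check. */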
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
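    /* Nothing to do: the interpreter in tci.c drives execution directly,
       so no host prologue or epilogue code is needed. */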
}