/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg-be-null.h"

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)
/* Bitfield n...m (in a 32-bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
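/* Illustrative expansion, assuming simple integer arguments:
 * BITS(7, 4) -> ((0xffffffffU << 24) >> 28) << 4 == 0xf0,
 * i.e. a mask with bits 7..4 set.
 */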

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
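/* Illustrative expansion: on a 32-bit host emulating a target with
 * 64-bit virtual addresses, { INDEX_op_qemu_ld64, { R64, L } } expands
 * to { "r", "r", "L", "L" }: a register pair for the 64-bit result and
 * a register pair for the guest address.
 */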

/* Operand constraints for the TCG opcodes accepted by the interpreter. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

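    /* Sentinel: tcg_add_target_add_op_defs() stops at op == -1. */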
    { -1 },
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
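/* The call-argument tables below hard-code room for MAX_OPC_PARAM_IARGS
 * (5) input arguments (twice that on 32-bit hosts, where a 64-bit
 * argument occupies a register pair), hence the build-time check above.
 */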

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32-bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
}
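/* Only tci_out_label() creates relocations in this backend, always with
 * type == sizeof(tcg_target_long) and addend == 0; that is what the
 * asserts above verify, and patching is therefore just storing the
 * label's absolute bytecode address into the reserved slot.
 */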

/* Parse target-specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
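/* Note that 'L' and 'S' are aliases of 'r' here: the interpreter
 * addresses all registers uniformly, so qemu_ld/qemu_st need no
 * dedicated registers. The 'i' (immediate) letter in "ri" is handled
 * by the generic constraint parser in tcg.c before this function is
 * called.
 */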

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by the TCG interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
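/* Bytecode format (sketch): every TCI instruction starts with a
 * two-byte header, the opcode written above and a size byte that is
 * left 0 here. Once all operands have been emitted, each caller
 * back-patches the size byte with the instruction's total length:
 *     old_code_ptr[1] = s->code_ptr - old_code_ptr;
 */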

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
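/* Operand encoding (illustrative): a register operand is a single byte
 * holding the register number; a constant operand is a TCG_CONST marker
 * byte followed by the immediate (4 bytes for tcg_out_ri32, 8 for
 * tcg_out_ri64, native width here).
 */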

/* Write register or constant (32-bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64-bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
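/* A label that is already bound is written immediately as its absolute
 * address; an unbound (forward) label reserves a native-size slot and
 * records it with tcg_out_reloc(), so that patch_reloc() can fill the
 * slot in once the label is bound.
 */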

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

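/* A 64-bit immediate whose value fits in an unsigned 32-bit word
 * (arg == arg32 below) is compacted into movi_i32, saving four bytes of
 * bytecode; this relies on the interpreter zero-extending movi_i32 into
 * the 64-bit register (an assumption based on how tci.c handles it).
 */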
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_mov_i32:      /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32:     /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:         /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
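/* Encoding example (illustrative; the numeric value of TCG_CONST is
 * defined in tcg-target.h): "add_i32 r2, r3, $1" with a constant second
 * operand is emitted as
 *     byte 0:     opcode (INDEX_op_add_i32)
 *     byte 1:     length, back-patched to 9
 *     byte 2:     0x02 (output register r2)
 *     byte 3:     0x03 (input register r3)
 *     byte 4:     TCG_CONST marker
 *     bytes 5..8: 0x00000001 (32-bit immediate)
 * nine bytes in total.
 */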

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        /* As in tcg_out_ld, the offset must fit in 32 bits. */
        assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return an exact 0 or 1: any non-zero value means a match. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code stores each TCG opcode in a uint8_t. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32-bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64-bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
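    /* Intentionally empty: this backend emits bytecode, not host code,
     * and tcg_qemu_tb_exec() is provided by the interpreter loop in
     * tci.c, so no host prologue or epilogue is needed.
     */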
}