git.proxmox.com Git - mirror_qemu.git/blob - tcg/tci.c
tcg/tci: Use ffi for calls
1 /*
2 * Tiny Code Interpreter for QEMU
3 *
4 * Copyright (c) 2009, 2011, 2016 Stefan Weil
5 *
6 * This program is free software: you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation, either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 *
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 */
19
20 #include "qemu/osdep.h"
21 #include "qemu-common.h"
22 #include "tcg/tcg.h" /* MAX_OPC_PARAM_IARGS */
23 #include "exec/cpu_ldst.h"
24 #include "tcg/tcg-op.h"
25 #include "qemu/compiler.h"
26 #include <ffi.h>
27
28
29 /*
30 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
31 * Without assertions, the interpreter runs much faster.
32 */
33 #if defined(CONFIG_DEBUG_TCG)
34 # define tci_assert(cond) assert(cond)
35 #else
36 # define tci_assert(cond) ((void)(cond))
37 #endif
38
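/*
 * Bytecode address published around helper calls: the interpreter has
 * no host return address to give to helpers, so code that needs the
 * "return address" reads it from this per-thread variable instead
 * (see the INDEX_op_call handling below).
 */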
39 __thread uintptr_t tci_tb_ptr;
40
41 static void
42 tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
43 {
44 tci_assert(index < TCG_TARGET_NB_REGS);
45 tci_assert(index != TCG_AREG0);
46 tci_assert(index != TCG_REG_CALL_STACK);
47 regs[index] = value;
48 }
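/*
 * TCG_AREG0 (the env pointer) and TCG_REG_CALL_STACK (the interpreter
 * stack base) are fixed at entry to tcg_qemu_tb_exec() and must never
 * be clobbered by generated bytecode, hence the two extra asserts.
 */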
49
50 static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
51 uint32_t low_index, uint64_t value)
52 {
53 tci_write_reg(regs, low_index, value);
54 tci_write_reg(regs, high_index, value >> 32);
55 }
56
57 /* Create a 64 bit value from two 32 bit values. */
58 static uint64_t tci_uint64(uint32_t high, uint32_t low)
59 {
60 return ((uint64_t)high << 32) + low;
61 }
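/*
 * Example of the register-pair convention used by the two helpers
 * above on 32-bit hosts: writing 0x1122334455667788 leaves
 * regs[low_index] = 0x55667788 and regs[high_index] = 0x11223344, and
 * tci_uint64(0x11223344, 0x55667788) reassembles the original value.
 */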
62
63 /* Read constant byte from bytecode. */
64 static uint8_t tci_read_b(const uint8_t **tb_ptr)
65 {
66 return *(tb_ptr[0]++);
67 }
68
69 /* Read register number from bytecode. */
70 static TCGReg tci_read_r(const uint8_t **tb_ptr)
71 {
72 uint8_t regno = tci_read_b(tb_ptr);
73 tci_assert(regno < TCG_TARGET_NB_REGS);
74 return regno;
75 }
76
77 /* Read constant (native size) from bytecode. */
78 static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
79 {
80 tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
81 *tb_ptr += sizeof(value);
82 return value;
83 }
84
85 /* Read unsigned constant (32 bit) from bytecode. */
86 static uint32_t tci_read_i32(const uint8_t **tb_ptr)
87 {
88 uint32_t value = *(const uint32_t *)(*tb_ptr);
89 *tb_ptr += sizeof(value);
90 return value;
91 }
92
93 /* Read signed constant (32 bit) from bytecode. */
94 static int32_t tci_read_s32(const uint8_t **tb_ptr)
95 {
96 int32_t value = *(const int32_t *)(*tb_ptr);
97 *tb_ptr += sizeof(value);
98 return value;
99 }
100
101 static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
102 {
103 return tci_read_i(tb_ptr);
104 }
105
106 /*
107 * Load sets of arguments all at once. The naming convention is:
108 * tci_args_<arguments>
109 * where arguments is a sequence of
110 *
111 * b = immediate (bit position)
112 * c = condition (TCGCond)
113 * i = immediate (uint32_t)
114 * I = immediate (tcg_target_ulong)
115 * l = label or pointer
116 * m = immediate (TCGMemOpIdx)
117 * n = immediate (call return length)
118 * r = register
119 * s = signed ldst offset
120 */
121 */
122 static void check_size(const uint8_t *start, const uint8_t **tb_ptr)
123 {
124 const uint8_t *old_code_ptr = start - 2;
125 uint8_t op_size = old_code_ptr[1];
126 tci_assert(*tb_ptr == old_code_ptr + op_size);
127 }
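/*
 * Every TCI instruction starts with a two-byte header: byte 0 is the
 * opcode and byte 1 the total instruction size in bytes, followed by
 * the operand bytes listed above.  check_size() relies on this layout:
 * "start" points just past the header, so (start - 2)[1] is the size
 * recorded by the encoder, which must match the number of bytes the
 * tci_args_* decoder actually consumed.
 */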
128
129 static void tci_args_l(const uint8_t **tb_ptr, void **l0)
130 {
131 const uint8_t *start = *tb_ptr;
132
133 *l0 = (void *)tci_read_label(tb_ptr);
134
135 check_size(start, tb_ptr);
136 }
137
138 static void tci_args_nll(const uint8_t **tb_ptr, uint8_t *n0,
139 void **l1, void **l2)
140 {
141 const uint8_t *start = *tb_ptr;
142
143 *n0 = tci_read_b(tb_ptr);
144 *l1 = (void *)tci_read_label(tb_ptr);
145 *l2 = (void *)tci_read_label(tb_ptr);
146
147 check_size(start, tb_ptr);
148 }
149
150 static void tci_args_rr(const uint8_t **tb_ptr,
151 TCGReg *r0, TCGReg *r1)
152 {
153 const uint8_t *start = *tb_ptr;
154
155 *r0 = tci_read_r(tb_ptr);
156 *r1 = tci_read_r(tb_ptr);
157
158 check_size(start, tb_ptr);
159 }
160
161 static void tci_args_ri(const uint8_t **tb_ptr,
162 TCGReg *r0, tcg_target_ulong *i1)
163 {
164 const uint8_t *start = *tb_ptr;
165
166 *r0 = tci_read_r(tb_ptr);
167 *i1 = tci_read_i32(tb_ptr);
168
169 check_size(start, tb_ptr);
170 }
171
172 #if TCG_TARGET_REG_BITS == 64
173 static void tci_args_rI(const uint8_t **tb_ptr,
174 TCGReg *r0, tcg_target_ulong *i1)
175 {
176 const uint8_t *start = *tb_ptr;
177
178 *r0 = tci_read_r(tb_ptr);
179 *i1 = tci_read_i(tb_ptr);
180
181 check_size(start, tb_ptr);
182 }
183 #endif
184
185 static void tci_args_rrm(const uint8_t **tb_ptr,
186 TCGReg *r0, TCGReg *r1, TCGMemOpIdx *m2)
187 {
188 const uint8_t *start = *tb_ptr;
189
190 *r0 = tci_read_r(tb_ptr);
191 *r1 = tci_read_r(tb_ptr);
192 *m2 = tci_read_i32(tb_ptr);
193
194 check_size(start, tb_ptr);
195 }
196
197 static void tci_args_rrr(const uint8_t **tb_ptr,
198 TCGReg *r0, TCGReg *r1, TCGReg *r2)
199 {
200 const uint8_t *start = *tb_ptr;
201
202 *r0 = tci_read_r(tb_ptr);
203 *r1 = tci_read_r(tb_ptr);
204 *r2 = tci_read_r(tb_ptr);
205
206 check_size(start, tb_ptr);
207 }
208
209 static void tci_args_rrs(const uint8_t **tb_ptr,
210 TCGReg *r0, TCGReg *r1, int32_t *i2)
211 {
212 const uint8_t *start = *tb_ptr;
213
214 *r0 = tci_read_r(tb_ptr);
215 *r1 = tci_read_r(tb_ptr);
216 *i2 = tci_read_s32(tb_ptr);
217
218 check_size(start, tb_ptr);
219 }
220
221 static void tci_args_rrcl(const uint8_t **tb_ptr,
222 TCGReg *r0, TCGReg *r1, TCGCond *c2, void **l3)
223 {
224 const uint8_t *start = *tb_ptr;
225
226 *r0 = tci_read_r(tb_ptr);
227 *r1 = tci_read_r(tb_ptr);
228 *c2 = tci_read_b(tb_ptr);
229 *l3 = (void *)tci_read_label(tb_ptr);
230
231 check_size(start, tb_ptr);
232 }
233
234 static void tci_args_rrrc(const uint8_t **tb_ptr,
235 TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
236 {
237 const uint8_t *start = *tb_ptr;
238
239 *r0 = tci_read_r(tb_ptr);
240 *r1 = tci_read_r(tb_ptr);
241 *r2 = tci_read_r(tb_ptr);
242 *c3 = tci_read_b(tb_ptr);
243
244 check_size(start, tb_ptr);
245 }
246
247 static void tci_args_rrrm(const uint8_t **tb_ptr,
248 TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGMemOpIdx *m3)
249 {
250 const uint8_t *start = *tb_ptr;
251
252 *r0 = tci_read_r(tb_ptr);
253 *r1 = tci_read_r(tb_ptr);
254 *r2 = tci_read_r(tb_ptr);
255 *m3 = tci_read_i32(tb_ptr);
256
257 check_size(start, tb_ptr);
258 }
259
260 static void tci_args_rrrbb(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
261 TCGReg *r2, uint8_t *i3, uint8_t *i4)
262 {
263 const uint8_t *start = *tb_ptr;
264
265 *r0 = tci_read_r(tb_ptr);
266 *r1 = tci_read_r(tb_ptr);
267 *r2 = tci_read_r(tb_ptr);
268 *i3 = tci_read_b(tb_ptr);
269 *i4 = tci_read_b(tb_ptr);
270
271 check_size(start, tb_ptr);
272 }
273
274 static void tci_args_rrrrm(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
275 TCGReg *r2, TCGReg *r3, TCGMemOpIdx *m4)
276 {
277 const uint8_t *start = *tb_ptr;
278
279 *r0 = tci_read_r(tb_ptr);
280 *r1 = tci_read_r(tb_ptr);
281 *r2 = tci_read_r(tb_ptr);
282 *r3 = tci_read_r(tb_ptr);
283 *m4 = tci_read_i32(tb_ptr);
284
285 check_size(start, tb_ptr);
286 }
287
288 #if TCG_TARGET_REG_BITS == 32
289 static void tci_args_rrrr(const uint8_t **tb_ptr,
290 TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
291 {
292 const uint8_t *start = *tb_ptr;
293
294 *r0 = tci_read_r(tb_ptr);
295 *r1 = tci_read_r(tb_ptr);
296 *r2 = tci_read_r(tb_ptr);
297 *r3 = tci_read_r(tb_ptr);
298
299 check_size(start, tb_ptr);
300 }
301
302 static void tci_args_rrrrcl(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
303 TCGReg *r2, TCGReg *r3, TCGCond *c4, void **l5)
304 {
305 const uint8_t *start = *tb_ptr;
306
307 *r0 = tci_read_r(tb_ptr);
308 *r1 = tci_read_r(tb_ptr);
309 *r2 = tci_read_r(tb_ptr);
310 *r3 = tci_read_r(tb_ptr);
311 *c4 = tci_read_b(tb_ptr);
312 *l5 = (void *)tci_read_label(tb_ptr);
313
314 check_size(start, tb_ptr);
315 }
316
317 static void tci_args_rrrrrc(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
318 TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
319 {
320 const uint8_t *start = *tb_ptr;
321
322 *r0 = tci_read_r(tb_ptr);
323 *r1 = tci_read_r(tb_ptr);
324 *r2 = tci_read_r(tb_ptr);
325 *r3 = tci_read_r(tb_ptr);
326 *r4 = tci_read_r(tb_ptr);
327 *c5 = tci_read_b(tb_ptr);
328
329 check_size(start, tb_ptr);
330 }
331
332 static void tci_args_rrrrrr(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
333 TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
334 {
335 const uint8_t *start = *tb_ptr;
336
337 *r0 = tci_read_r(tb_ptr);
338 *r1 = tci_read_r(tb_ptr);
339 *r2 = tci_read_r(tb_ptr);
340 *r3 = tci_read_r(tb_ptr);
341 *r4 = tci_read_r(tb_ptr);
342 *r5 = tci_read_r(tb_ptr);
343
344 check_size(start, tb_ptr);
345 }
346 #endif
347
348 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
349 {
350 bool result = false;
351 int32_t i0 = u0;
352 int32_t i1 = u1;
353 switch (condition) {
354 case TCG_COND_EQ:
355 result = (u0 == u1);
356 break;
357 case TCG_COND_NE:
358 result = (u0 != u1);
359 break;
360 case TCG_COND_LT:
361 result = (i0 < i1);
362 break;
363 case TCG_COND_GE:
364 result = (i0 >= i1);
365 break;
366 case TCG_COND_LE:
367 result = (i0 <= i1);
368 break;
369 case TCG_COND_GT:
370 result = (i0 > i1);
371 break;
372 case TCG_COND_LTU:
373 result = (u0 < u1);
374 break;
375 case TCG_COND_GEU:
376 result = (u0 >= u1);
377 break;
378 case TCG_COND_LEU:
379 result = (u0 <= u1);
380 break;
381 case TCG_COND_GTU:
382 result = (u0 > u1);
383 break;
384 default:
385 g_assert_not_reached();
386 }
387 return result;
388 }
389
390 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
391 {
392 bool result = false;
393 int64_t i0 = u0;
394 int64_t i1 = u1;
395 switch (condition) {
396 case TCG_COND_EQ:
397 result = (u0 == u1);
398 break;
399 case TCG_COND_NE:
400 result = (u0 != u1);
401 break;
402 case TCG_COND_LT:
403 result = (i0 < i1);
404 break;
405 case TCG_COND_GE:
406 result = (i0 >= i1);
407 break;
408 case TCG_COND_LE:
409 result = (i0 <= i1);
410 break;
411 case TCG_COND_GT:
412 result = (i0 > i1);
413 break;
414 case TCG_COND_LTU:
415 result = (u0 < u1);
416 break;
417 case TCG_COND_GEU:
418 result = (u0 >= u1);
419 break;
420 case TCG_COND_LEU:
421 result = (u0 <= u1);
422 break;
423 case TCG_COND_GTU:
424 result = (u0 > u1);
425 break;
426 default:
427 g_assert_not_reached();
428 }
429 return result;
430 }
431
432 #define qemu_ld_ub \
433 cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
434 #define qemu_ld_leuw \
435 cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
436 #define qemu_ld_leul \
437 cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
438 #define qemu_ld_leq \
439 cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
440 #define qemu_ld_beuw \
441 cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
442 #define qemu_ld_beul \
443 cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
444 #define qemu_ld_beq \
445 cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
446 #define qemu_st_b(X) \
447 cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
448 #define qemu_st_lew(X) \
449 cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
450 #define qemu_st_lel(X) \
451 cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
452 #define qemu_st_leq(X) \
453 cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
454 #define qemu_st_bew(X) \
455 cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
456 #define qemu_st_bel(X) \
457 cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
458 #define qemu_st_beq(X) \
459 cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
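/*
 * These helpers expand in terms of locals of the interpreter loop
 * below: "taddr" is the guest virtual address, "oi" the TCGMemOpIdx
 * holding the MemOp and mmu index, and the current bytecode pointer
 * serves as the retaddr argument used for exception unwinding.
 */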
460
461 #if TCG_TARGET_REG_BITS == 64
462 # define CASE_32_64(x) \
463 case glue(glue(INDEX_op_, x), _i64): \
464 case glue(glue(INDEX_op_, x), _i32):
465 # define CASE_64(x) \
466 case glue(glue(INDEX_op_, x), _i64):
467 #else
468 # define CASE_32_64(x) \
469 case glue(glue(INDEX_op_, x), _i32):
470 # define CASE_64(x)
471 #endif
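/*
 * Example: on a 64-bit host CASE_32_64(add) expands to
 *     case INDEX_op_add_i64: case INDEX_op_add_i32:
 * while on a 32-bit host only the _i32 case is generated and
 * CASE_64(x) disappears entirely.
 */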
472
473 /* Interpret pseudo code in tb. */
474 /*
475 * Disable CFI checks.
476 * One possible operation in the pseudo code is a call to binary code.
477 * Therefore, disable CFI checks in the interpreter function
478 */
479 uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
480 const void *v_tb_ptr)
481 {
482 const uint8_t *tb_ptr = v_tb_ptr;
483 tcg_target_ulong regs[TCG_TARGET_NB_REGS];
484 uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
485 / sizeof(uint64_t)];
486 void *call_slots[TCG_STATIC_CALL_ARGS_SIZE / sizeof(uint64_t)];
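/*
 * stack[] provides both the helper-call argument area (the first
 * TCG_STATIC_CALL_ARGS_SIZE bytes, one 64-bit slot per argument) and
 * the static frame addressed through TCG_REG_CALL_STACK; call_slots[]
 * will hold one pointer per argument slot for libffi.
 */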
487
488 regs[TCG_AREG0] = (tcg_target_ulong)env;
489 regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
490 /* Other call_slots entries initialized at first use (see below). */
491 call_slots[0] = NULL;
492 tci_assert(tb_ptr);
493
494 for (;;) {
495 TCGOpcode opc = tb_ptr[0];
496 TCGReg r0, r1, r2, r3;
497 tcg_target_ulong t1;
498 TCGCond condition;
499 target_ulong taddr;
500 uint8_t pos, len;
501 uint32_t tmp32;
502 uint64_t tmp64;
503 #if TCG_TARGET_REG_BITS == 32
504 TCGReg r4, r5;
505 uint64_t T1, T2;
506 #endif
507 TCGMemOpIdx oi;
508 int32_t ofs;
509 void *ptr, *cif;
510
511 /* Skip opcode and size entry. */
512 tb_ptr += 2;
513
514 switch (opc) {
515 case INDEX_op_call:
516 /*
517 * Set up the ffi_avalue array once, delayed until now
518 * because many TBs do not make any calls. In tcg_gen_callN,
519 * we arranged for every real argument to be "left-aligned"
520 * in each 64-bit slot.
521 */
522 if (unlikely(call_slots[0] == NULL)) {
523 for (int i = 0; i < ARRAY_SIZE(call_slots); ++i) {
524 call_slots[i] = &stack[i];
525 }
526 }
527
528 tci_args_nll(&tb_ptr, &len, &ptr, &cif);
529
530 /* Helper functions may need to access the "return address" */
531 tci_tb_ptr = (uintptr_t)tb_ptr;
532
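/*
 * ffi_call(cif, fn, rvalue, avalue): "cif" is the prebuilt call
 * descriptor for this helper's signature, referenced directly from
 * the bytecode; "ptr" is the helper to invoke; stack doubles as the
 * return-value buffer; and call_slots is the avalue array of pointers
 * into those same stack slots.
 */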
533 ffi_call(cif, ptr, stack, call_slots);
534
535 /* Any result winds up "left-aligned" in the stack[0] slot. */
536 switch (len) {
537 case 0: /* void */
538 break;
539 case 1: /* uint32_t */
540 /*
541 * Note that libffi has an odd special case in that it will
542 * always widen an integral result to ffi_arg.
543 */
544 if (sizeof(ffi_arg) == 4) {
545 regs[TCG_REG_R0] = *(uint32_t *)stack;
546 break;
547 }
548 /* fall through */
549 case 2: /* uint64_t */
550 if (TCG_TARGET_REG_BITS == 32) {
551 tci_write_reg64(regs, TCG_REG_R1, TCG_REG_R0, stack[0]);
552 } else {
553 regs[TCG_REG_R0] = stack[0];
554 }
555 break;
556 default:
557 g_assert_not_reached();
558 }
559 break;
560
561 case INDEX_op_br:
562 tci_args_l(&tb_ptr, &ptr);
563 tb_ptr = ptr;
564 continue;
565 case INDEX_op_setcond_i32:
566 tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
567 regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
568 break;
569 #if TCG_TARGET_REG_BITS == 32
570 case INDEX_op_setcond2_i32:
571 tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &condition);
572 T1 = tci_uint64(regs[r2], regs[r1]);
573 T2 = tci_uint64(regs[r4], regs[r3]);
574 regs[r0] = tci_compare64(T1, T2, condition);
575 break;
576 #elif TCG_TARGET_REG_BITS == 64
577 case INDEX_op_setcond_i64:
578 tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
579 regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
580 break;
581 #endif
582 CASE_32_64(mov)
583 tci_args_rr(&tb_ptr, &r0, &r1);
584 regs[r0] = regs[r1];
585 break;
586 case INDEX_op_tci_movi_i32:
587 tci_args_ri(&tb_ptr, &r0, &t1);
588 regs[r0] = t1;
589 break;
590
591 /* Load/store operations (32 bit). */
592
593 CASE_32_64(ld8u)
594 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
595 ptr = (void *)(regs[r1] + ofs);
596 regs[r0] = *(uint8_t *)ptr;
597 break;
598 CASE_32_64(ld8s)
599 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
600 ptr = (void *)(regs[r1] + ofs);
601 regs[r0] = *(int8_t *)ptr;
602 break;
603 CASE_32_64(ld16u)
604 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
605 ptr = (void *)(regs[r1] + ofs);
606 regs[r0] = *(uint16_t *)ptr;
607 break;
608 CASE_32_64(ld16s)
609 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
610 ptr = (void *)(regs[r1] + ofs);
611 regs[r0] = *(int16_t *)ptr;
612 break;
613 case INDEX_op_ld_i32:
614 CASE_64(ld32u)
615 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
616 ptr = (void *)(regs[r1] + ofs);
617 regs[r0] = *(uint32_t *)ptr;
618 break;
619 CASE_32_64(st8)
620 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
621 ptr = (void *)(regs[r1] + ofs);
622 *(uint8_t *)ptr = regs[r0];
623 break;
624 CASE_32_64(st16)
625 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
626 ptr = (void *)(regs[r1] + ofs);
627 *(uint16_t *)ptr = regs[r0];
628 break;
629 case INDEX_op_st_i32:
630 CASE_64(st32)
631 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
632 ptr = (void *)(regs[r1] + ofs);
633 *(uint32_t *)ptr = regs[r0];
634 break;
635
636 /* Arithmetic operations (mixed 32/64 bit). */
637
638 CASE_32_64(add)
639 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
640 regs[r0] = regs[r1] + regs[r2];
641 break;
642 CASE_32_64(sub)
643 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
644 regs[r0] = regs[r1] - regs[r2];
645 break;
646 CASE_32_64(mul)
647 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
648 regs[r0] = regs[r1] * regs[r2];
649 break;
650 CASE_32_64(and)
651 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
652 regs[r0] = regs[r1] & regs[r2];
653 break;
654 CASE_32_64(or)
655 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
656 regs[r0] = regs[r1] | regs[r2];
657 break;
658 CASE_32_64(xor)
659 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
660 regs[r0] = regs[r1] ^ regs[r2];
661 break;
662
663 /* Arithmetic operations (32 bit). */
664
665 case INDEX_op_div_i32:
666 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
667 regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
668 break;
669 case INDEX_op_divu_i32:
670 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
671 regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
672 break;
673 case INDEX_op_rem_i32:
674 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
675 regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
676 break;
677 case INDEX_op_remu_i32:
678 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
679 regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
680 break;
681
682 /* Shift/rotate operations (32 bit). */
683
684 case INDEX_op_shl_i32:
685 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
686 regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
687 break;
688 case INDEX_op_shr_i32:
689 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
690 regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
691 break;
692 case INDEX_op_sar_i32:
693 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
694 regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
695 break;
696 #if TCG_TARGET_HAS_rot_i32
697 case INDEX_op_rotl_i32:
698 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
699 regs[r0] = rol32(regs[r1], regs[r2] & 31);
700 break;
701 case INDEX_op_rotr_i32:
702 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
703 regs[r0] = ror32(regs[r1], regs[r2] & 31);
704 break;
705 #endif
706 #if TCG_TARGET_HAS_deposit_i32
707 case INDEX_op_deposit_i32:
708 tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
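/* e.g. deposit32(0xffffffff, 8, 4, 0x5) == 0xfffff5ff: the len-bit
   field starting at bit "pos" of r1 is replaced by the low bits of r2. */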
709 regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
710 break;
711 #endif
712 case INDEX_op_brcond_i32:
713 tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
714 if (tci_compare32(regs[r0], regs[r1], condition)) {
715 tb_ptr = ptr;
716 }
717 break;
718 #if TCG_TARGET_REG_BITS == 32
719 case INDEX_op_add2_i32:
720 tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
721 T1 = tci_uint64(regs[r3], regs[r2]);
722 T2 = tci_uint64(regs[r5], regs[r4]);
723 tci_write_reg64(regs, r1, r0, T1 + T2);
724 break;
725 case INDEX_op_sub2_i32:
726 tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
727 T1 = tci_uint64(regs[r3], regs[r2]);
728 T2 = tci_uint64(regs[r5], regs[r4]);
729 tci_write_reg64(regs, r1, r0, T1 - T2);
730 break;
731 case INDEX_op_brcond2_i32:
732 tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &condition, &ptr);
733 T1 = tci_uint64(regs[r1], regs[r0]);
734 T2 = tci_uint64(regs[r3], regs[r2]);
735 if (tci_compare64(T1, T2, condition)) {
736 tb_ptr = ptr;
737 continue;
738 }
739 break;
740 case INDEX_op_mulu2_i32:
741 tci_args_rrrr(&tb_ptr, &r0, &r1, &r2, &r3);
742 tci_write_reg64(regs, r1, r0, (uint64_t)regs[r2] * regs[r3]);
743 break;
744 #endif /* TCG_TARGET_REG_BITS == 32 */
745 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
746 CASE_32_64(ext8s)
747 tci_args_rr(&tb_ptr, &r0, &r1);
748 regs[r0] = (int8_t)regs[r1];
749 break;
750 #endif
751 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
752 CASE_32_64(ext16s)
753 tci_args_rr(&tb_ptr, &r0, &r1);
754 regs[r0] = (int16_t)regs[r1];
755 break;
756 #endif
757 #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
758 CASE_32_64(ext8u)
759 tci_args_rr(&tb_ptr, &r0, &r1);
760 regs[r0] = (uint8_t)regs[r1];
761 break;
762 #endif
763 #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
764 CASE_32_64(ext16u)
765 tci_args_rr(&tb_ptr, &r0, &r1);
766 regs[r0] = (uint16_t)regs[r1];
767 break;
768 #endif
769 #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
770 CASE_32_64(bswap16)
771 tci_args_rr(&tb_ptr, &r0, &r1);
772 regs[r0] = bswap16(regs[r1]);
773 break;
774 #endif
775 #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
776 CASE_32_64(bswap32)
777 tci_args_rr(&tb_ptr, &r0, &r1);
778 regs[r0] = bswap32(regs[r1]);
779 break;
780 #endif
781 #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
782 CASE_32_64(not)
783 tci_args_rr(&tb_ptr, &r0, &r1);
784 regs[r0] = ~regs[r1];
785 break;
786 #endif
787 #if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
788 CASE_32_64(neg)
789 tci_args_rr(&tb_ptr, &r0, &r1);
790 regs[r0] = -regs[r1];
791 break;
792 #endif
793 #if TCG_TARGET_REG_BITS == 64
794 case INDEX_op_tci_movi_i64:
795 tci_args_rI(&tb_ptr, &r0, &t1);
796 regs[r0] = t1;
797 break;
798
799 /* Load/store operations (64 bit). */
800
801 case INDEX_op_ld32s_i64:
802 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
803 ptr = (void *)(regs[r1] + ofs);
804 regs[r0] = *(int32_t *)ptr;
805 break;
806 case INDEX_op_ld_i64:
807 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
808 ptr = (void *)(regs[r1] + ofs);
809 regs[r0] = *(uint64_t *)ptr;
810 break;
811 case INDEX_op_st_i64:
812 tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
813 ptr = (void *)(regs[r1] + ofs);
814 *(uint64_t *)ptr = regs[r0];
815 break;
816
817 /* Arithmetic operations (64 bit). */
818
819 case INDEX_op_div_i64:
820 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
821 regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
822 break;
823 case INDEX_op_divu_i64:
824 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
825 regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
826 break;
827 case INDEX_op_rem_i64:
828 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
829 regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
830 break;
831 case INDEX_op_remu_i64:
832 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
833 regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
834 break;
835
836 /* Shift/rotate operations (64 bit). */
837
838 case INDEX_op_shl_i64:
839 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
840 regs[r0] = regs[r1] << (regs[r2] & 63);
841 break;
842 case INDEX_op_shr_i64:
843 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
844 regs[r0] = regs[r1] >> (regs[r2] & 63);
845 break;
846 case INDEX_op_sar_i64:
847 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
848 regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
849 break;
850 #if TCG_TARGET_HAS_rot_i64
851 case INDEX_op_rotl_i64:
852 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
853 regs[r0] = rol64(regs[r1], regs[r2] & 63);
854 break;
855 case INDEX_op_rotr_i64:
856 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
857 regs[r0] = ror64(regs[r1], regs[r2] & 63);
858 break;
859 #endif
860 #if TCG_TARGET_HAS_deposit_i64
861 case INDEX_op_deposit_i64:
862 tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
863 regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
864 break;
865 #endif
866 case INDEX_op_brcond_i64:
867 tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
868 if (tci_compare64(regs[r0], regs[r1], condition)) {
869 tb_ptr = ptr;
870 }
871 break;
872 case INDEX_op_ext32s_i64:
873 case INDEX_op_ext_i32_i64:
874 tci_args_rr(&tb_ptr, &r0, &r1);
875 regs[r0] = (int32_t)regs[r1];
876 break;
877 case INDEX_op_ext32u_i64:
878 case INDEX_op_extu_i32_i64:
879 tci_args_rr(&tb_ptr, &r0, &r1);
880 regs[r0] = (uint32_t)regs[r1];
881 break;
882 #if TCG_TARGET_HAS_bswap64_i64
883 case INDEX_op_bswap64_i64:
884 tci_args_rr(&tb_ptr, &r0, &r1);
885 regs[r0] = bswap64(regs[r1]);
886 break;
887 #endif
888 #endif /* TCG_TARGET_REG_BITS == 64 */
889
890 /* QEMU specific operations. */
891
892 case INDEX_op_exit_tb:
893 tci_args_l(&tb_ptr, &ptr);
894 return (uintptr_t)ptr;
895
896 case INDEX_op_goto_tb:
897 tci_args_l(&tb_ptr, &ptr);
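/*
 * "ptr" is the address of this TB's jump-target slot, which TB
 * chaining patches: an unchained slot points just past this op so
 * execution falls through, while a chained slot points at the start
 * of the target TB's bytecode, so reloading it follows the link.
 */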
898 tb_ptr = *(void **)ptr;
899 break;
900
901 case INDEX_op_qemu_ld_i32:
902 if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
903 tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
904 taddr = regs[r1];
905 } else {
906 tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
907 taddr = tci_uint64(regs[r2], regs[r1]);
908 }
909 switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
910 case MO_UB:
911 tmp32 = qemu_ld_ub;
912 break;
913 case MO_SB:
914 tmp32 = (int8_t)qemu_ld_ub;
915 break;
916 case MO_LEUW:
917 tmp32 = qemu_ld_leuw;
918 break;
919 case MO_LESW:
920 tmp32 = (int16_t)qemu_ld_leuw;
921 break;
922 case MO_LEUL:
923 tmp32 = qemu_ld_leul;
924 break;
925 case MO_BEUW:
926 tmp32 = qemu_ld_beuw;
927 break;
928 case MO_BESW:
929 tmp32 = (int16_t)qemu_ld_beuw;
930 break;
931 case MO_BEUL:
932 tmp32 = qemu_ld_beul;
933 break;
934 default:
935 g_assert_not_reached();
936 }
937 regs[r0] = tmp32;
938 break;
939
940 case INDEX_op_qemu_ld_i64:
941 if (TCG_TARGET_REG_BITS == 64) {
942 tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
943 taddr = regs[r1];
944 } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
945 tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
946 taddr = regs[r2];
947 } else {
948 tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
949 taddr = tci_uint64(regs[r3], regs[r2]);
950 }
951 switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
952 case MO_UB:
953 tmp64 = qemu_ld_ub;
954 break;
955 case MO_SB:
956 tmp64 = (int8_t)qemu_ld_ub;
957 break;
958 case MO_LEUW:
959 tmp64 = qemu_ld_leuw;
960 break;
961 case MO_LESW:
962 tmp64 = (int16_t)qemu_ld_leuw;
963 break;
964 case MO_LEUL:
965 tmp64 = qemu_ld_leul;
966 break;
967 case MO_LESL:
968 tmp64 = (int32_t)qemu_ld_leul;
969 break;
970 case MO_LEQ:
971 tmp64 = qemu_ld_leq;
972 break;
973 case MO_BEUW:
974 tmp64 = qemu_ld_beuw;
975 break;
976 case MO_BESW:
977 tmp64 = (int16_t)qemu_ld_beuw;
978 break;
979 case MO_BEUL:
980 tmp64 = qemu_ld_beul;
981 break;
982 case MO_BESL:
983 tmp64 = (int32_t)qemu_ld_beul;
984 break;
985 case MO_BEQ:
986 tmp64 = qemu_ld_beq;
987 break;
988 default:
989 g_assert_not_reached();
990 }
991 if (TCG_TARGET_REG_BITS == 32) {
992 tci_write_reg64(regs, r1, r0, tmp64);
993 } else {
994 regs[r0] = tmp64;
995 }
996 break;
997
998 case INDEX_op_qemu_st_i32:
999 if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
1000 tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
1001 taddr = regs[r1];
1002 } else {
1003 tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
1004 taddr = tci_uint64(regs[r2], regs[r1]);
1005 }
1006 tmp32 = regs[r0];
1007 switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
1008 case MO_UB:
1009 qemu_st_b(tmp32);
1010 break;
1011 case MO_LEUW:
1012 qemu_st_lew(tmp32);
1013 break;
1014 case MO_LEUL:
1015 qemu_st_lel(tmp32);
1016 break;
1017 case MO_BEUW:
1018 qemu_st_bew(tmp32);
1019 break;
1020 case MO_BEUL:
1021 qemu_st_bel(tmp32);
1022 break;
1023 default:
1024 g_assert_not_reached();
1025 }
1026 break;
1027
1028 case INDEX_op_qemu_st_i64:
1029 if (TCG_TARGET_REG_BITS == 64) {
1030 tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
1031 taddr = regs[r1];
1032 tmp64 = regs[r0];
1033 } else {
1034 if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
1035 tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
1036 taddr = regs[r2];
1037 } else {
1038 tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
1039 taddr = tci_uint64(regs[r3], regs[r2]);
1040 }
1041 tmp64 = tci_uint64(regs[r1], regs[r0]);
1042 }
1043 switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
1044 case MO_UB:
1045 qemu_st_b(tmp64);
1046 break;
1047 case MO_LEUW:
1048 qemu_st_lew(tmp64);
1049 break;
1050 case MO_LEUL:
1051 qemu_st_lel(tmp64);
1052 break;
1053 case MO_LEQ:
1054 qemu_st_leq(tmp64);
1055 break;
1056 case MO_BEUW:
1057 qemu_st_bew(tmp64);
1058 break;
1059 case MO_BEUL:
1060 qemu_st_bel(tmp64);
1061 break;
1062 case MO_BEQ:
1063 qemu_st_beq(tmp64);
1064 break;
1065 default:
1066 g_assert_not_reached();
1067 }
1068 break;
1069
1070 case INDEX_op_mb:
1071 /* Ensure ordering for all kinds */
1072 smp_mb();
1073 break;
1074 default:
1075 g_assert_not_reached();
1076 }
1077 }
1078 }
1079
1080 /*
1081 * Disassembler that matches the interpreter
1082 */
1083
1084 static const char *str_r(TCGReg r)
1085 {
1086 static const char regs[TCG_TARGET_NB_REGS][4] = {
1087 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
1088 "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
1089 };
1090
1091 QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
1092 QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);
1093
1094 assert((unsigned)r < TCG_TARGET_NB_REGS);
1095 return regs[r];
1096 }
1097
1098 static const char *str_c(TCGCond c)
1099 {
1100 static const char cond[16][8] = {
1101 [TCG_COND_NEVER] = "never",
1102 [TCG_COND_ALWAYS] = "always",
1103 [TCG_COND_EQ] = "eq",
1104 [TCG_COND_NE] = "ne",
1105 [TCG_COND_LT] = "lt",
1106 [TCG_COND_GE] = "ge",
1107 [TCG_COND_LE] = "le",
1108 [TCG_COND_GT] = "gt",
1109 [TCG_COND_LTU] = "ltu",
1110 [TCG_COND_GEU] = "geu",
1111 [TCG_COND_LEU] = "leu",
1112 [TCG_COND_GTU] = "gtu",
1113 };
1114
1115 assert((unsigned)c < ARRAY_SIZE(cond));
1116 assert(cond[c][0] != 0);
1117 return cond[c];
1118 }
1119
1120 /* Disassemble TCI bytecode. */
1121 int print_insn_tci(bfd_vma addr, disassemble_info *info)
1122 {
1123 uint8_t buf[256];
1124 int length, status;
1125 const TCGOpDef *def;
1126 const char *op_name;
1127 TCGOpcode op;
1128 TCGReg r0, r1, r2, r3;
1129 #if TCG_TARGET_REG_BITS == 32
1130 TCGReg r4, r5;
1131 #endif
1132 tcg_target_ulong i1;
1133 int32_t s2;
1134 TCGCond c;
1135 TCGMemOpIdx oi;
1136 uint8_t pos, len;
1137 void *ptr, *cif;
1138 const uint8_t *tb_ptr;
1139
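/*
 * Read the two-byte instruction header (opcode, total length) first,
 * then fetch the remaining length - 2 operand bytes and decode them
 * with the same tci_args_* helpers the interpreter uses.
 */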
1140 status = info->read_memory_func(addr, buf, 2, info);
1141 if (status != 0) {
1142 info->memory_error_func(status, addr, info);
1143 return -1;
1144 }
1145 op = buf[0];
1146 length = buf[1];
1147
1148 if (length < 2) {
1149 info->fprintf_func(info->stream, "invalid length %d", length);
1150 return 1;
1151 }
1152
1153 status = info->read_memory_func(addr + 2, buf + 2, length - 2, info);
1154 if (status != 0) {
1155 info->memory_error_func(status, addr + 2, info);
1156 return -1;
1157 }
1158
1159 def = &tcg_op_defs[op];
1160 op_name = def->name;
1161 tb_ptr = buf + 2;
1162
1163 switch (op) {
1164 case INDEX_op_br:
1165 case INDEX_op_exit_tb:
1166 case INDEX_op_goto_tb:
1167 tci_args_l(&tb_ptr, &ptr);
1168 info->fprintf_func(info->stream, "%-12s %p", op_name, ptr);
1169 break;
1170
1171 case INDEX_op_call:
1172 tci_args_nll(&tb_ptr, &len, &ptr, &cif);
1173 info->fprintf_func(info->stream, "%-12s %d, %p, %p",
1174 op_name, len, ptr, cif);
1175 break;
1176
1177 case INDEX_op_brcond_i32:
1178 case INDEX_op_brcond_i64:
1179 tci_args_rrcl(&tb_ptr, &r0, &r1, &c, &ptr);
1180 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %p",
1181 op_name, str_r(r0), str_r(r1), str_c(c), ptr);
1182 break;
1183
1184 case INDEX_op_setcond_i32:
1185 case INDEX_op_setcond_i64:
1186 tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &c);
1187 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
1188 op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
1189 break;
1190
1191 case INDEX_op_tci_movi_i32:
1192 tci_args_ri(&tb_ptr, &r0, &i1);
1193 info->fprintf_func(info->stream, "%-12s %s, 0x%" TCG_PRIlx,
1194 op_name, str_r(r0), i1);
1195 break;
1196
1197 #if TCG_TARGET_REG_BITS == 64
1198 case INDEX_op_tci_movi_i64:
1199 tci_args_rI(&tb_ptr, &r0, &i1);
1200 info->fprintf_func(info->stream, "%-12s %s, 0x%" TCG_PRIlx,
1201 op_name, str_r(r0), i1);
1202 break;
1203 #endif
1204
1205 case INDEX_op_ld8u_i32:
1206 case INDEX_op_ld8u_i64:
1207 case INDEX_op_ld8s_i32:
1208 case INDEX_op_ld8s_i64:
1209 case INDEX_op_ld16u_i32:
1210 case INDEX_op_ld16u_i64:
1211 case INDEX_op_ld16s_i32:
1212 case INDEX_op_ld16s_i64:
1213 case INDEX_op_ld32u_i64:
1214 case INDEX_op_ld32s_i64:
1215 case INDEX_op_ld_i32:
1216 case INDEX_op_ld_i64:
1217 case INDEX_op_st8_i32:
1218 case INDEX_op_st8_i64:
1219 case INDEX_op_st16_i32:
1220 case INDEX_op_st16_i64:
1221 case INDEX_op_st32_i64:
1222 case INDEX_op_st_i32:
1223 case INDEX_op_st_i64:
1224 tci_args_rrs(&tb_ptr, &r0, &r1, &s2);
1225 info->fprintf_func(info->stream, "%-12s %s, %s, %d",
1226 op_name, str_r(r0), str_r(r1), s2);
1227 break;
1228
1229 case INDEX_op_mov_i32:
1230 case INDEX_op_mov_i64:
1231 case INDEX_op_ext8s_i32:
1232 case INDEX_op_ext8s_i64:
1233 case INDEX_op_ext8u_i32:
1234 case INDEX_op_ext8u_i64:
1235 case INDEX_op_ext16s_i32:
1236 case INDEX_op_ext16s_i64:
1237 case INDEX_op_ext16u_i32:
1238 case INDEX_op_ext32s_i64:
1239 case INDEX_op_ext32u_i64:
1240 case INDEX_op_ext_i32_i64:
1241 case INDEX_op_extu_i32_i64:
1242 case INDEX_op_bswap16_i32:
1243 case INDEX_op_bswap16_i64:
1244 case INDEX_op_bswap32_i32:
1245 case INDEX_op_bswap32_i64:
1246 case INDEX_op_bswap64_i64:
1247 case INDEX_op_not_i32:
1248 case INDEX_op_not_i64:
1249 case INDEX_op_neg_i32:
1250 case INDEX_op_neg_i64:
1251 tci_args_rr(&tb_ptr, &r0, &r1);
1252 info->fprintf_func(info->stream, "%-12s %s, %s",
1253 op_name, str_r(r0), str_r(r1));
1254 break;
1255
1256 case INDEX_op_add_i32:
1257 case INDEX_op_add_i64:
1258 case INDEX_op_sub_i32:
1259 case INDEX_op_sub_i64:
1260 case INDEX_op_mul_i32:
1261 case INDEX_op_mul_i64:
1262 case INDEX_op_and_i32:
1263 case INDEX_op_and_i64:
1264 case INDEX_op_or_i32:
1265 case INDEX_op_or_i64:
1266 case INDEX_op_xor_i32:
1267 case INDEX_op_xor_i64:
1268 case INDEX_op_div_i32:
1269 case INDEX_op_div_i64:
1270 case INDEX_op_rem_i32:
1271 case INDEX_op_rem_i64:
1272 case INDEX_op_divu_i32:
1273 case INDEX_op_divu_i64:
1274 case INDEX_op_remu_i32:
1275 case INDEX_op_remu_i64:
1276 case INDEX_op_shl_i32:
1277 case INDEX_op_shl_i64:
1278 case INDEX_op_shr_i32:
1279 case INDEX_op_shr_i64:
1280 case INDEX_op_sar_i32:
1281 case INDEX_op_sar_i64:
1282 case INDEX_op_rotl_i32:
1283 case INDEX_op_rotl_i64:
1284 case INDEX_op_rotr_i32:
1285 case INDEX_op_rotr_i64:
1286 tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
1287 info->fprintf_func(info->stream, "%-12s %s, %s, %s",
1288 op_name, str_r(r0), str_r(r1), str_r(r2));
1289 break;
1290
1291 case INDEX_op_deposit_i32:
1292 case INDEX_op_deposit_i64:
1293 tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
1294 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %d, %d",
1295 op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
1296 break;
1297
1298 #if TCG_TARGET_REG_BITS == 32
1299 case INDEX_op_setcond2_i32:
1300 tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &c);
1301 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %s",
1302 op_name, str_r(r0), str_r(r1), str_r(r2),
1303 str_r(r3), str_r(r4), str_c(c));
1304 break;
1305
1306 case INDEX_op_brcond2_i32:
1307 tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &c, &ptr);
1308 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %p",
1309 op_name, str_r(r0), str_r(r1),
1310 str_r(r2), str_r(r3), str_c(c), ptr);
1311 break;
1312
1313 case INDEX_op_mulu2_i32:
1314 tci_args_rrrr(&tb_ptr, &r0, &r1, &r2, &r3);
1315 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
1316 op_name, str_r(r0), str_r(r1),
1317 str_r(r2), str_r(r3));
1318 break;
1319
1320 case INDEX_op_add2_i32:
1321 case INDEX_op_sub2_i32:
1322 tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
1323 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %s",
1324 op_name, str_r(r0), str_r(r1), str_r(r2),
1325 str_r(r3), str_r(r4), str_r(r5));
1326 break;
1327 #endif
1328
1329 case INDEX_op_qemu_ld_i64:
1330 case INDEX_op_qemu_st_i64:
1331 len = DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
1332 goto do_qemu_ldst;
1333 case INDEX_op_qemu_ld_i32:
1334 case INDEX_op_qemu_st_i32:
1335 len = 1;
1336 do_qemu_ldst:
1337 len += DIV_ROUND_UP(TARGET_LONG_BITS, TCG_TARGET_REG_BITS);
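/*
 * e.g. a 64-bit access on a 32-bit host with a 64-bit guest address
 * needs two data registers plus two address registers, giving the
 * four-register "rrrrm" form below.
 */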
1338 switch (len) {
1339 case 2:
1340 tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
1341 info->fprintf_func(info->stream, "%-12s %s, %s, %x",
1342 op_name, str_r(r0), str_r(r1), oi);
1343 break;
1344 case 3:
1345 tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
1346 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %x",
1347 op_name, str_r(r0), str_r(r1), str_r(r2), oi);
1348 break;
1349 case 4:
1350 tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
1351 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %x",
1352 op_name, str_r(r0), str_r(r1),
1353 str_r(r2), str_r(r3), oi);
1354 break;
1355 default:
1356 g_assert_not_reached();
1357 }
1358 break;
1359
1360 default:
1361 info->fprintf_func(info->stream, "illegal opcode %d", op);
1362 break;
1363 }
1364
1365 return length;
1366 }