]> git.proxmox.com Git - qemu.git/blob - tci.c
70f8308f9364284b0699baebb7c1f0b139e07f1f
[qemu.git] / tci.c
1 /*
2 * Tiny Code Interpreter for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * This program is free software: you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation, either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 *
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 */
19
20 #include "config.h"
21
22 /* Defining NDEBUG disables assertions (which makes the code faster). */
23 #if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
24 # define NDEBUG
25 #endif
26
27 #include "qemu-common.h"
28 #include "exec/exec-all.h" /* MAX_OPC_PARAM_IARGS */
29 #include "tcg-op.h"
30
/* Marker for missing code: print the location and abort.
   Used for TCG opcodes the interpreter does not implement yet. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* The call marshalling below hard-codes 5 input arguments
   (10 registers on 32-bit hosts, where each argument uses a pair). */
#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
/* 5 input arguments, each split into a low/high register pair. */
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;   /* bytecode address of the opcode being executed */
#endif

/* The interpreter's virtual register file; TCG_AREG0 holds env. */
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
61
62 static tcg_target_ulong tci_read_reg(TCGReg index)
63 {
64 assert(index < ARRAY_SIZE(tci_reg));
65 return tci_reg[index];
66 }
67
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read register INDEX, truncated and sign-extended from 8 bits. */
static int8_t tci_read_reg8s(TCGReg index)
{
    int8_t value = tci_read_reg(index);

    return value;
}
#endif
74
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read register INDEX, truncated and sign-extended from 16 bits. */
static int16_t tci_read_reg16s(TCGReg index)
{
    int16_t value = tci_read_reg(index);

    return value;
}
#endif
81
#if TCG_TARGET_REG_BITS == 64
/* Read register INDEX, truncated and sign-extended from 32 bits. */
static int32_t tci_read_reg32s(TCGReg index)
{
    int32_t value = tci_read_reg(index);

    return value;
}
#endif
88
89 static uint8_t tci_read_reg8(TCGReg index)
90 {
91 return (uint8_t)tci_read_reg(index);
92 }
93
94 static uint16_t tci_read_reg16(TCGReg index)
95 {
96 return (uint16_t)tci_read_reg(index);
97 }
98
99 static uint32_t tci_read_reg32(TCGReg index)
100 {
101 return (uint32_t)tci_read_reg(index);
102 }
103
#if TCG_TARGET_REG_BITS == 64
/* Read register INDEX as a full 64-bit value. */
static uint64_t tci_read_reg64(TCGReg index)
{
    uint64_t value = tci_read_reg(index);

    return value;
}
#endif
110
111 static void tci_write_reg(TCGReg index, tcg_target_ulong value)
112 {
113 assert(index < ARRAY_SIZE(tci_reg));
114 assert(index != TCG_AREG0);
115 tci_reg[index] = value;
116 }
117
118 static void tci_write_reg8s(TCGReg index, int8_t value)
119 {
120 tci_write_reg(index, value);
121 }
122
123 static void tci_write_reg16s(TCGReg index, int16_t value)
124 {
125 tci_write_reg(index, value);
126 }
127
#if TCG_TARGET_REG_BITS == 64
/* Store a 32-bit value, sign-extended to 64 bits. */
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif
134
135 static void tci_write_reg8(TCGReg index, uint8_t value)
136 {
137 tci_write_reg(index, value);
138 }
139
140 static void tci_write_reg16(TCGReg index, uint16_t value)
141 {
142 tci_write_reg(index, value);
143 }
144
145 static void tci_write_reg32(TCGReg index, uint32_t value)
146 {
147 tci_write_reg(index, value);
148 }
149
#if TCG_TARGET_REG_BITS == 32
/* Split a 64-bit value across a pair of 32-bit registers. */
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, (uint32_t)value);
    tci_write_reg(high_index, (uint32_t)(value >> 32));
}
#elif TCG_TARGET_REG_BITS == 64
/* Store a full 64-bit value into a single register. */
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif
163
#if TCG_TARGET_REG_BITS == 32
/* Combine two 32-bit halves into one 64-bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) | low;
}
#endif
171
172 /* Read constant (native size) from bytecode. */
173 static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
174 {
175 tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
176 *tb_ptr += sizeof(value);
177 return value;
178 }
179
/* Read unsigned constant (32 bit) from bytecode and advance the cursor. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    uint32_t value = *(uint32_t *)p;

    *tb_ptr = p + sizeof(value);
    return value;
}
187
/* Read signed constant (32 bit) from bytecode and advance the cursor. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    int32_t value = *(int32_t *)p;

    *tb_ptr = p + sizeof(value);
    return value;
}
195
#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode and advance the cursor. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    uint64_t value = *(uint64_t *)p;

    *tb_ptr = p + sizeof(value);
    return value;
}
#endif
205
206 /* Read indexed register (native size) from bytecode. */
207 static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
208 {
209 tcg_target_ulong value = tci_read_reg(**tb_ptr);
210 *tb_ptr += 1;
211 return value;
212 }
213
/* Read a register index byte; return that register's low 8 bits. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg8(index);
}
221
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read a register index byte; return that register sign-extended from 8 bits. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg8s(index);
}
#endif
231
/* Read a register index byte; return that register's low 16 bits. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg16(index);
}
239
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read a register index byte; return that register sign-extended from 16 bits. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg16s(index);
}
#endif
249
/* Read a register index byte; return that register's low 32 bits. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg32(index);
}
257
#if TCG_TARGET_REG_BITS == 32
/* Read two register index bytes (low half first) and
   combine the 32-bit register pair into a 64-bit value. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    uint32_t high = tci_read_r32(tb_ptr);

    return tci_uint64(high, low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read a register index byte; return that register sign-extended from 32 bits. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg32s(index);
}

/* Read a register index byte; return that register's full 64 bits. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint8_t index = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg64(index);
}
#endif
282
/* Read indexed register(s) with target address from bytecode.
   When the guest address is wider than a host register (e.g. 64-bit
   guest on a 32-bit host), the address is encoded as two register
   indices: low half first, then high half. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
292
293 /* Read indexed register or constant (native size) from bytecode. */
294 static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
295 {
296 tcg_target_ulong value;
297 TCGReg r = **tb_ptr;
298 *tb_ptr += 1;
299 if (r == TCG_CONST) {
300 value = tci_read_i(tb_ptr);
301 } else {
302 value = tci_read_reg(r);
303 }
304 return value;
305 }
306
307 /* Read indexed register or constant (32 bit) from bytecode. */
308 static uint32_t tci_read_ri32(uint8_t **tb_ptr)
309 {
310 uint32_t value;
311 TCGReg r = **tb_ptr;
312 *tb_ptr += 1;
313 if (r == TCG_CONST) {
314 value = tci_read_i32(tb_ptr);
315 } else {
316 value = tci_read_reg32(r);
317 }
318 return value;
319 }
320
#if TCG_TARGET_REG_BITS == 32
/* Read two register-or-constant operands (low half first) and
   combine them into a 64-bit value. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    uint32_t high = tci_read_ri32(tb_ptr);

    return tci_uint64(high, low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read either a register value or an inline constant (64 bit). */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    TCGReg r = **tb_ptr;

    *tb_ptr += 1;
    if (r != TCG_CONST) {
        return tci_read_reg64(r);
    }
    return tci_read_i64(tb_ptr);
}
#endif
343
344 static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
345 {
346 tcg_target_ulong label = tci_read_i(tb_ptr);
347 assert(label != 0);
348 return label;
349 }
350
351 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
352 {
353 bool result = false;
354 int32_t i0 = u0;
355 int32_t i1 = u1;
356 switch (condition) {
357 case TCG_COND_EQ:
358 result = (u0 == u1);
359 break;
360 case TCG_COND_NE:
361 result = (u0 != u1);
362 break;
363 case TCG_COND_LT:
364 result = (i0 < i1);
365 break;
366 case TCG_COND_GE:
367 result = (i0 >= i1);
368 break;
369 case TCG_COND_LE:
370 result = (i0 <= i1);
371 break;
372 case TCG_COND_GT:
373 result = (i0 > i1);
374 break;
375 case TCG_COND_LTU:
376 result = (u0 < u1);
377 break;
378 case TCG_COND_GEU:
379 result = (u0 >= u1);
380 break;
381 case TCG_COND_LEU:
382 result = (u0 <= u1);
383 break;
384 case TCG_COND_GTU:
385 result = (u0 > u1);
386 break;
387 default:
388 TODO();
389 }
390 return result;
391 }
392
393 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
394 {
395 bool result = false;
396 int64_t i0 = u0;
397 int64_t i1 = u1;
398 switch (condition) {
399 case TCG_COND_EQ:
400 result = (u0 == u1);
401 break;
402 case TCG_COND_NE:
403 result = (u0 != u1);
404 break;
405 case TCG_COND_LT:
406 result = (i0 < i1);
407 break;
408 case TCG_COND_GE:
409 result = (i0 >= i1);
410 break;
411 case TCG_COND_LE:
412 result = (i0 <= i1);
413 break;
414 case TCG_COND_GT:
415 result = (i0 > i1);
416 break;
417 case TCG_COND_LTU:
418 result = (u0 < u1);
419 break;
420 case TCG_COND_GEU:
421 result = (u0 >= u1);
422 break;
423 case TCG_COND_LEU:
424 result = (u0 <= u1);
425 break;
426 case TCG_COND_GTU:
427 result = (u0 > u1);
428 break;
429 default:
430 TODO();
431 }
432 return result;
433 }
434
/* Interpret pseudo code in tb.
 *
 * Executes the bytecode stream produced by the TCI code generator.
 * Each instruction is encoded as: opcode byte, total size byte, then
 * operands (register indices, constants, labels) which the tci_read_*
 * helpers consume in order.  Decode order is therefore load-bearing.
 *
 * env     - CPU state, installed in the TCG_AREG0 virtual register.
 * tb_ptr  - start of the bytecode for this translation block.
 * Returns the next-TB hint passed to INDEX_op_exit_tb.
 */
tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        /* Only used to assert that each decoder consumed exactly
           op_size bytes. */
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

#if defined(GETPC)
        /* Publish the current bytecode address for exception unwinding. */
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            /* Helper call: all possible argument registers are passed;
               the helper uses only as many as its signature declares. */
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            /* NOTE(review): R4 is skipped in this list — presumably
               reserved (e.g. stack pointer); confirm against the TCI
               target register allocation. */
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            /* 64-bit result returned in the R0/R1 pair. */
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            /* Unconditional branch to an absolute bytecode address. */
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */
            /* Host-memory accesses: base register + signed 32-bit offset. */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            /* NOTE(review): if t2 == 0, "t1 >> (32 - t2)" shifts by 32,
               which is undefined behavior in C — confirm the generator
               never emits a zero rotate count. */
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            /* NOTE(review): same shift-by-32 hazard as rotl when t2 == 0. */
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;  /* deposit position */
            tmp8 = *tb_ptr++;   /* deposit length in bits */
            /* NOTE(review): "1 << tmp8" is undefined when tmp8 == 32
               (full-width deposit) — confirm the generator bounds len. */
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            /* 64-bit add on a 32-bit host: t1:t0 is the high:low
               destination register pair. */
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            /* 32x32 -> 64 unsigned multiply; result in the t1:t0 pair. */
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;  /* deposit position */
            tmp8 = *tb_ptr++;   /* deposit length in bits */
            /* NOTE(review): "1ULL << tmp8" is undefined when tmp8 == 64 —
               confirm the generator bounds len. */
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            /* Untested path: flagged with TODO() before executing. */
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            /* Leave the interpreter, handing back the next-TB hint. */
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Relative jump: the operand is a signed displacement
               from the end of this instruction. */
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            /* On 32-bit hosts the 64-bit result needs a second
               destination register for the high half. */
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        /* Verify that the decoder consumed exactly op_size bytes. */
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}