]> git.proxmox.com Git - qemu.git/blob - tci.c
tci: Use a local variable for env
[qemu.git] / tci.c
1 /*
2 * Tiny Code Interpreter for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * This program is free software: you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation, either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 *
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 */
19
20 #include "config.h"
21
22 /* Defining NDEBUG disables assertions (which makes the code faster). */
23 #if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
24 # define NDEBUG
25 #endif
26
#include "qemu-common.h"
#include "exec/exec-all.h"      /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"
#include <string.h>             /* memcpy for unaligned immediate reads */
30
/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
/* Signature used to invoke TCG helper functions from INDEX_op_call.
   On a 32 bit host every (possible) 64 bit argument occupies two
   registers, hence twice the number of parameters. */
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

/* The interpreter's virtual register file; TCG_AREG0 holds env
   while tcg_qemu_tb_exec() runs. */
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
61
62 static tcg_target_ulong tci_read_reg(TCGReg index)
63 {
64 assert(index < ARRAY_SIZE(tci_reg));
65 return tci_reg[index];
66 }
67
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Return register @reg truncated to a signed 8 bit value. */
static int8_t tci_read_reg8s(TCGReg reg)
{
    return (int8_t)tci_read_reg(reg);
}
#endif
74
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Return register @reg truncated to a signed 16 bit value. */
static int16_t tci_read_reg16s(TCGReg reg)
{
    return (int16_t)tci_read_reg(reg);
}
#endif
81
#if TCG_TARGET_REG_BITS == 64
/* Return register @reg truncated to a signed 32 bit value. */
static int32_t tci_read_reg32s(TCGReg reg)
{
    return (int32_t)tci_read_reg(reg);
}
#endif
88
89 static uint8_t tci_read_reg8(TCGReg index)
90 {
91 return (uint8_t)tci_read_reg(index);
92 }
93
94 static uint16_t tci_read_reg16(TCGReg index)
95 {
96 return (uint16_t)tci_read_reg(index);
97 }
98
99 static uint32_t tci_read_reg32(TCGReg index)
100 {
101 return (uint32_t)tci_read_reg(index);
102 }
103
#if TCG_TARGET_REG_BITS == 64
/* Return the full 64 bit value of register @reg. */
static uint64_t tci_read_reg64(TCGReg reg)
{
    return tci_read_reg(reg);
}
#endif
110
111 static void tci_write_reg(TCGReg index, tcg_target_ulong value)
112 {
113 assert(index < ARRAY_SIZE(tci_reg));
114 assert(index != TCG_AREG0);
115 tci_reg[index] = value;
116 }
117
118 static void tci_write_reg8s(TCGReg index, int8_t value)
119 {
120 tci_write_reg(index, value);
121 }
122
123 static void tci_write_reg16s(TCGReg index, int16_t value)
124 {
125 tci_write_reg(index, value);
126 }
127
#if TCG_TARGET_REG_BITS == 64
/* Store a sign-extended 32 bit value into register @reg. */
static void tci_write_reg32s(TCGReg reg, int32_t value)
{
    tci_write_reg(reg, value);
}
#endif
134
135 static void tci_write_reg8(TCGReg index, uint8_t value)
136 {
137 tci_write_reg(index, value);
138 }
139
140 static void tci_write_reg16(TCGReg index, uint16_t value)
141 {
142 tci_write_reg(index, value);
143 }
144
145 static void tci_write_reg32(TCGReg index, uint32_t value)
146 {
147 tci_write_reg(index, value);
148 }
149
#if TCG_TARGET_REG_BITS == 32
/* Store a 64 bit value in a pair of 32 bit registers. */
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, (uint32_t)value);
    tci_write_reg(high_index, (uint32_t)(value >> 32));
}
#elif TCG_TARGET_REG_BITS == 64
/* Store a 64 bit value into register @reg. */
static void tci_write_reg64(TCGReg reg, uint64_t value)
{
    tci_write_reg(reg, value);
}
#endif
163
#if TCG_TARGET_REG_BITS == 32
/* Combine two 32 bit halves into one 64 bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) | low;
}
#endif
171
172 /* Read constant (native size) from bytecode. */
173 static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
174 {
175 tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
176 *tb_ptr += sizeof(value);
177 return value;
178 }
179
/* Read an unsigned constant (32 bit) from the bytecode and advance
 * *tb_ptr past it.  memcpy avoids the undefined behavior of a
 * potentially misaligned *(uint32_t *) load from the byte stream. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
187
/* Read a signed constant (32 bit) from the bytecode and advance
 * *tb_ptr past it.  memcpy avoids the undefined behavior of a
 * potentially misaligned *(int32_t *) load from the byte stream. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
195
#if TCG_TARGET_REG_BITS == 64
/* Read a constant (64 bit) from the bytecode and advance *tb_ptr
 * past it.  memcpy avoids the undefined behavior of a potentially
 * misaligned *(uint64_t *) load from the byte stream. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
#endif
205
206 /* Read indexed register (native size) from bytecode. */
207 static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
208 {
209 tcg_target_ulong value = tci_read_reg(**tb_ptr);
210 *tb_ptr += 1;
211 return value;
212 }
213
214 /* Read indexed register (8 bit) from bytecode. */
215 static uint8_t tci_read_r8(uint8_t **tb_ptr)
216 {
217 uint8_t value = tci_read_reg8(**tb_ptr);
218 *tb_ptr += 1;
219 return value;
220 }
221
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read a register number from the bytecode and return that register's
   value as a signed 8 bit quantity. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    TCGReg reg = *(*tb_ptr)++;
    return tci_read_reg8s(reg);
}
#endif
231
232 /* Read indexed register (16 bit) from bytecode. */
233 static uint16_t tci_read_r16(uint8_t **tb_ptr)
234 {
235 uint16_t value = tci_read_reg16(**tb_ptr);
236 *tb_ptr += 1;
237 return value;
238 }
239
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read a register number from the bytecode and return that register's
   value as a signed 16 bit quantity. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    TCGReg reg = *(*tb_ptr)++;
    return tci_read_reg16s(reg);
}
#endif
249
250 /* Read indexed register (32 bit) from bytecode. */
251 static uint32_t tci_read_r32(uint8_t **tb_ptr)
252 {
253 uint32_t value = tci_read_reg32(**tb_ptr);
254 *tb_ptr += 1;
255 return value;
256 }
257
#if TCG_TARGET_REG_BITS == 32
/* Read two register numbers (low half first) and combine the two
   32 bit register values into one 64 bit value. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_r32(tb_ptr);
    uint32_t hi = tci_read_r32(tb_ptr);
    return tci_uint64(hi, lo);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read a register number and return that register's value as a
   signed 32 bit quantity. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    TCGReg reg = *(*tb_ptr)++;
    return tci_read_reg32s(reg);
}

/* Read a register number and return that register's full 64 bit value. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    TCGReg reg = *(*tb_ptr)++;
    return tci_read_reg64(reg);
}
#endif
282
283 /* Read indexed register(s) with target address from bytecode. */
284 static target_ulong tci_read_ulong(uint8_t **tb_ptr)
285 {
286 target_ulong taddr = tci_read_r(tb_ptr);
287 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
288 taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
289 #endif
290 return taddr;
291 }
292
293 /* Read indexed register or constant (native size) from bytecode. */
294 static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
295 {
296 tcg_target_ulong value;
297 TCGReg r = **tb_ptr;
298 *tb_ptr += 1;
299 if (r == TCG_CONST) {
300 value = tci_read_i(tb_ptr);
301 } else {
302 value = tci_read_reg(r);
303 }
304 return value;
305 }
306
307 /* Read indexed register or constant (32 bit) from bytecode. */
308 static uint32_t tci_read_ri32(uint8_t **tb_ptr)
309 {
310 uint32_t value;
311 TCGReg r = **tb_ptr;
312 *tb_ptr += 1;
313 if (r == TCG_CONST) {
314 value = tci_read_i32(tb_ptr);
315 } else {
316 value = tci_read_reg32(r);
317 }
318 return value;
319 }
320
#if TCG_TARGET_REG_BITS == 32
/* Read two registers or constants (2 * 32 bit, low half first)
   and combine them into one 64 bit value. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_ri32(tb_ptr);
    uint32_t hi = tci_read_ri32(tb_ptr);
    return tci_uint64(hi, lo);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read either a register value or an inline constant (64 bit)
   from the bytecode; the pseudo register TCG_CONST marks a constant. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    TCGReg reg = *(*tb_ptr)++;

    if (reg == TCG_CONST) {
        return tci_read_i64(tb_ptr);
    }
    return tci_read_reg64(reg);
}
#endif
343
344 static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
345 {
346 tcg_target_ulong label = tci_read_i(tb_ptr);
347 assert(label != 0);
348 return label;
349 }
350
351 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
352 {
353 bool result = false;
354 int32_t i0 = u0;
355 int32_t i1 = u1;
356 switch (condition) {
357 case TCG_COND_EQ:
358 result = (u0 == u1);
359 break;
360 case TCG_COND_NE:
361 result = (u0 != u1);
362 break;
363 case TCG_COND_LT:
364 result = (i0 < i1);
365 break;
366 case TCG_COND_GE:
367 result = (i0 >= i1);
368 break;
369 case TCG_COND_LE:
370 result = (i0 <= i1);
371 break;
372 case TCG_COND_GT:
373 result = (i0 > i1);
374 break;
375 case TCG_COND_LTU:
376 result = (u0 < u1);
377 break;
378 case TCG_COND_GEU:
379 result = (u0 >= u1);
380 break;
381 case TCG_COND_LEU:
382 result = (u0 <= u1);
383 break;
384 case TCG_COND_GTU:
385 result = (u0 > u1);
386 break;
387 default:
388 TODO();
389 }
390 return result;
391 }
392
393 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
394 {
395 bool result = false;
396 int64_t i0 = u0;
397 int64_t i1 = u1;
398 switch (condition) {
399 case TCG_COND_EQ:
400 result = (u0 == u1);
401 break;
402 case TCG_COND_NE:
403 result = (u0 != u1);
404 break;
405 case TCG_COND_LT:
406 result = (i0 < i1);
407 break;
408 case TCG_COND_GE:
409 result = (i0 >= i1);
410 break;
411 case TCG_COND_LE:
412 result = (i0 <= i1);
413 break;
414 case TCG_COND_GT:
415 result = (i0 > i1);
416 break;
417 case TCG_COND_LTU:
418 result = (u0 < u1);
419 break;
420 case TCG_COND_GEU:
421 result = (u0 >= u1);
422 break;
423 case TCG_COND_LEU:
424 result = (u0 <= u1);
425 break;
426 case TCG_COND_GTU:
427 result = (u0 > u1);
428 break;
429 default:
430 TODO();
431 }
432 return result;
433 }
434
/* Interpret pseudo code in tb.
 *
 * env:    state of the virtual CPU; published to the interpreter's
 *         register file in TCG_AREG0 so env-relative loads/stores work.
 * tb_ptr: start of the bytecode stream produced by the tci backend.
 *
 * Returns the value given to INDEX_op_exit_tb (the next-tb hint).
 *
 * Every operand read (tci_read_*) advances tb_ptr, so the reads in each
 * case must happen in exactly the order the encoder emitted the operands.
 */
tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
#if defined(GETPC)
        /* Publish the current bytecode position for helpers using GETPC(). */
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        /* Byte 1 of each instruction is its total encoded size; used only
           to assert that every case consumed exactly its operands. */
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            /* Helper calls pass arguments in fixed registers R0..;
               NOTE(review): R4 is skipped in the 32 bit list below --
               presumably reserved by the backend; verify against the
               tci target register allocation. */
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            /* 64 bit result is returned in the R0/R1 pair. */
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            /* NOTE(review): (t1 >> (32 - t2)) is undefined for t2 == 0;
               assumes the generator never emits a zero rotate -- confirm. */
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;          /* ofs */
            tmp8 = *tb_ptr++;           /* len */
            /* NOTE(review): (1 << tmp8) - 1 is undefined for len == 32;
               assumes the generator never emits a full-width deposit. */
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;          /* ofs */
            tmp8 = *tb_ptr++;           /* len */
            /* NOTE(review): (1ULL << tmp8) - 1 is undefined for len == 64;
               assumes the generator never emits a full-width deposit. */
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            /* Still unimplemented: TODO() aborts before the (dead) code
               below would run. */
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            /* exit_tb carries a 64 bit immediate even on 32 bit hosts;
               the assignment truncates it to tcg_target_ulong there. */
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Relative jump: the 32 bit operand is a signed displacement
               from the end of this instruction. */
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            /* On 32 bit hosts the 64 bit result needs a second register. */
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        /* Each case must have consumed exactly its encoded operands. */
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}