/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec/exec-all.h"      /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
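/* On a 32 bit host, every (potentially 64 bit) helper argument occupies a
   register pair, which is why the function type below takes twice as many
   parameters there. */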
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr,
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

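/* The interpreter's virtual register file.  TCG_AREG0 holds the CPU
   environment pointer and TCG_REG_CALL_STACK the stack for temporaries;
   both are set up in tcg_qemu_tb_exec(), and tci_write_reg() asserts
   that neither is overwritten. */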
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

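/* In the bytecode, the reserved register index TCG_CONST is a sentinel:
   instead of naming a register, it announces that an immediate constant
   follows in the instruction stream. */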
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

#ifdef CONFIG_SOFTMMU
# define mmuidx          tci_read_i(&tb_ptr)
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
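/* With CONFIG_SOFTMMU, guest accesses go through the MMU helpers, passing
   the current bytecode pointer as the return address for exception
   handling.  Note that mmuidx expands to a read which consumes the mmu
   index from the bytecode stream, so each macro above advances tb_ptr.
   In user mode, g2h() translates the guest address directly to a host
   pointer. */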

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    assert(tb_ptr);

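    /* Each bytecode instruction starts with an opcode byte followed by a
       byte holding the total instruction size; the operand bytes come
       after.  The size entry is only consulted by the assertions below,
       which verify that each handler consumed exactly its operands. */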
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOp memop;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
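            /* Operand layout: destination register, base value, field
               value, then two immediate bytes giving the bit offset
               (tmp16) and the field width (tmp8); tmp32 becomes the mask
               of tmp8 bits at offset tmp16. */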
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
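        /* On a 32 bit host, 64 bit values live in register pairs: the
           first operand byte names the low half, the second the high
           half (cf. tci_write_reg64). */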
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
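            /* The 64 bit operand of exit_tb becomes the return value of
               tcg_qemu_tb_exec(); the caller typically uses it to chain
               to the next translation block. */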
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
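            /* goto_tb jumps by a relative 32 bit offset, which direct
               block chaining may patch while the guest runs. */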
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
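        /* For the qemu_ld/st opcodes, a TCGMemOp operand encodes the
           access size, signedness and endianness (MO_UB, MO_LESW,
           MO_BEQ, ...); each case maps to the matching load/store
           macro defined above. */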
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}