/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
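
/*
 * On a 32-bit host, each 64-bit helper argument occupies two
 * tcg_target_ulong words, so the 6 input arguments checked above
 * require 12 parameter words; on a 64-bit host, one word each suffices.
 */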
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

__thread uintptr_t tci_tb_ptr;

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}

/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
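
/*
 * Example: tci_uint64(0x00000001, 0x00000002) == 0x0000000100000002.
 * Together with tci_write_reg64() above, this implements the
 * (high, low) register-pair convention used for 64-bit values on
 * 32-bit hosts.
 */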

/* Read constant byte from bytecode. */
static uint8_t tci_read_b(const uint8_t **tb_ptr)
{
    return *(tb_ptr[0]++);
}

/* Read register number from bytecode. */
static TCGReg tci_read_r(const uint8_t **tb_ptr)
{
    uint8_t regno = tci_read_b(tb_ptr);
    tci_assert(regno < TCG_TARGET_NB_REGS);
    return regno;
}

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    return tci_read_i(tb_ptr);
}

/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   b = immediate (bit position)
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   m = immediate (TCGMemOpIdx)
 *   r = register
 *   s = signed ldst offset
 */

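/*
 * As an illustration: an op whose operands are encoded as "rrs" is
 * followed in the bytecode stream by one register-number byte, a second
 * register-number byte, and then a 32-bit signed offset, which is
 * exactly what tci_args_rrs() below consumes.
 */
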
static void tci_args_l(const uint8_t **tb_ptr, void **l0)
{
    *l0 = (void *)tci_read_label(tb_ptr);
}

static void tci_args_rr(const uint8_t **tb_ptr,
                        TCGReg *r0, TCGReg *r1)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
}

static void tci_args_ri(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i32(tb_ptr);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_args_rI(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i(tb_ptr);
}
#endif

static void tci_args_rrm(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, TCGMemOpIdx *m2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *m2 = tci_read_i32(tb_ptr);
}

static void tci_args_rrr(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
}

static void tci_args_rrs(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *i2 = tci_read_s32(tb_ptr);
}

static void tci_args_rrcl(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGCond *c2, void **l3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *c2 = tci_read_b(tb_ptr);
    *l3 = (void *)tci_read_label(tb_ptr);
}

static void tci_args_rrrc(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *c3 = tci_read_b(tb_ptr);
}

static void tci_args_rrrm(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGMemOpIdx *m3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *m3 = tci_read_i32(tb_ptr);
}

static void tci_args_rrrbb(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *i3 = tci_read_b(tb_ptr);
    *i4 = tci_read_b(tb_ptr);
}

static void tci_args_rrrrm(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGMemOpIdx *m4)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *m4 = tci_read_i32(tb_ptr);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_args_rrrr(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
}

static void tci_args_rrrrcl(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGCond *c4, void **l5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *c4 = tci_read_b(tb_ptr);
    *l5 = (void *)tci_read_label(tb_ptr);
}

static void tci_args_rrrrrc(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *c5 = tci_read_b(tb_ptr);
}

static void tci_args_rrrrrr(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *r5 = tci_read_r(tb_ptr);
}
#endif

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
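
/*
 * The qemu_ld_* and qemu_st_* macros above are only usable inside
 * tcg_qemu_tb_exec() below: they expand against the interpreter's
 * local variables env, taddr, oi and tb_ptr.
 */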

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
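
/*
 * For example, on a 64-bit host CASE_32_64(add) expands to
 *     case INDEX_op_add_i64: case INDEX_op_add_i32:
 * so a single handler body serves both operand widths, while CASE_64
 * contributes a 64-bit-only case label.
 */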

/*
 * Interpret pseudo code in tb.
 *
 * Disable CFI checks: one possible operation in the pseudo code is a
 * call into binary code, so CFI checks must be disabled for the
 * interpreter function itself.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

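    /*
     * Each encoded operation starts with a one-byte opcode and a one-byte
     * total length (which includes these two header bytes), followed by
     * its operands; the tci_assert() checks below verify that each
     * handler consumed exactly op_size bytes.
     */
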
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        TCGReg r0, r1, r2, r3;
        tcg_target_ulong t1;
        TCGCond condition;
        target_ulong taddr;
        uint8_t pos, len;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        TCGReg r4, r5;
        uint64_t T1, T2;
#endif
        TCGMemOpIdx oi;
        int32_t ofs;
        void *ptr;

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            tci_args_l(&tb_ptr, &ptr);
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5),
                                           tci_read_reg(regs, TCG_REG_R6),
                                           tci_read_reg(regs, TCG_REG_R7),
                                           tci_read_reg(regs, TCG_REG_R8),
                                           tci_read_reg(regs, TCG_REG_R9),
                                           tci_read_reg(regs, TCG_REG_R10),
                                           tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi_i32:
            tci_args_ri(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
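        /*
         * deposit32(dst, pos, len, val) replaces the len-bit field of dst
         * starting at bit pos with the low len bits of val, e.g.
         * deposit32(0xffffffff, 8, 8, 0xab) == 0xffffabff.
         */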
        case INDEX_op_deposit_i32:
            tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare32(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
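
        /*
         * On 32-bit hosts, the 64-bit operations below work on register
         * pairs: tci_uint64() assembles a (high, low) pair into one value,
         * and tci_write_reg64(regs, hi, lo, val) writes the low 32 bits
         * of val to register lo and the high 32 bits to register hi.
         */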
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
        case INDEX_op_brcond2_i32:
            tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &condition, &ptr);
            T1 = tci_uint64(regs[r1], regs[r0]);
            T2 = tci_uint64(regs[r3], regs[r2]);
            if (tci_compare64(T1, T2, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            tci_args_rrrr(&tb_ptr, &r0, &r1, &r2, &r3);
            tci_write_reg64(regs, r1, r0, (uint64_t)regs[r2] * regs[r3]);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
        CASE_32_64(ext16s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_tci_movi_i64:
            tci_args_rI(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare64(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            tci_args_l(&tb_ptr, &ptr);
            return (uintptr_t)ptr;

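        /*
         * For goto_tb, the label operand is (presumably) the address of a
         * pointer-sized jump-target slot that the TB-chaining code can
         * patch at run time; the interpreter reloads it on every pass.
         */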
        case INDEX_op_goto_tb:
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = *(void **)ptr;
            continue;

        case INDEX_op_qemu_ld_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            regs[r0] = tmp32;
            break;

        case INDEX_op_qemu_ld_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
            } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                taddr = regs[r2];
            } else {
                tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
                taddr = tci_uint64(regs[r3], regs[r2]);
            }
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg64(regs, r1, r0, tmp64);
            } else {
                regs[r0] = tmp64;
            }
            break;

        case INDEX_op_qemu_st_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            tmp32 = regs[r0];
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp32);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp32);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp32);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp32);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp32);
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_qemu_st_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
                tmp64 = regs[r0];
            } else {
                if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                    tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                    taddr = regs[r2];
                } else {
                    tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
                    taddr = tci_uint64(regs[r3], regs[r2]);
                }
                tmp64 = tci_uint64(regs[r1], regs[r0]);
            }
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_mb:
            /* Ensure ordering for all kinds of memory operations. */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
}