]> git.proxmox.com Git - mirror_qemu.git/blame - tcg/tci.c
hw/arm/armsse: Add support for TYPE_SSE_TIMER in ARMSSEDeviceInfo
[mirror_qemu.git] / tcg / tci.c
CommitLineData
7657f4bf
SW
1/*
2 * Tiny Code Interpreter for QEMU
3 *
3ccdbecf 4 * Copyright (c) 2009, 2011, 2016 Stefan Weil
7657f4bf
SW
5 *
6 * This program is free software: you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation, either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 *
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 */
19
d38ea87a 20#include "qemu/osdep.h"
7657f4bf 21
3ccdbecf
SW
/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 * NOTE: when assertions are disabled the condition is NOT evaluated at all,
 * so "cond" must be free of side effects. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif
29
#include "qemu-common.h"
#include "tcg/tcg.h"            /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
#include <string.h>             /* memcpy, used by the bytecode readers */
7657f4bf 35
/*
 * Every helper reached via INDEX_op_call is invoked through a function
 * pointer of this single type.  On 32-bit hosts the parameter list is
 * doubled (12 slots for MAX_OPC_PARAM_IARGS == 6 arguments) because the
 * call handler below passes one register per slot and a 64-bit argument
 * needs two host registers there.
 */
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
51
13e71f08
RH
/*
 * Per-thread pointer into the current TCI bytecode; updated before
 * calling out to a helper (see INDEX_op_call).
 * NOTE(review): presumably consumed elsewhere to recover the guest state
 * on a fault -- confirm against the users of tci_tb_ptr.
 */
__thread uintptr_t tci_tb_ptr;
53
5e75150c 54static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
7657f4bf 55{
5e75150c
EC
56 tci_assert(index < TCG_TARGET_NB_REGS);
57 return regs[index];
7657f4bf
SW
58}
59
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Fetch register INDEX as a sign-extended 8-bit value. */
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    tcg_target_ulong raw = tci_read_reg(regs, index);

    return (int8_t)raw;
}
#endif
66
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Fetch register INDEX as a sign-extended 16-bit value. */
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    tcg_target_ulong raw = tci_read_reg(regs, index);

    return (int16_t)raw;
}
#endif
73
#if TCG_TARGET_REG_BITS == 64
/* Fetch register INDEX as a sign-extended 32-bit value. */
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    tcg_target_ulong raw = tci_read_reg(regs, index);

    return (int32_t)raw;
}
#endif
80
5e75150c 81static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
7657f4bf 82{
5e75150c 83 return (uint8_t)tci_read_reg(regs, index);
7657f4bf
SW
84}
85
5e75150c 86static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
7657f4bf 87{
5e75150c 88 return (uint16_t)tci_read_reg(regs, index);
7657f4bf
SW
89}
90
5e75150c 91static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
7657f4bf 92{
5e75150c 93 return (uint32_t)tci_read_reg(regs, index);
7657f4bf
SW
94}
95
#if TCG_TARGET_REG_BITS == 64
/* Fetch register INDEX as a full 64-bit value (64-bit hosts only). */
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    uint64_t raw = tci_read_reg(regs, index);

    return raw;
}
#endif
102
5e75150c
EC
103static void
104tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
7657f4bf 105{
5e75150c 106 tci_assert(index < TCG_TARGET_NB_REGS);
3ccdbecf
SW
107 tci_assert(index != TCG_AREG0);
108 tci_assert(index != TCG_REG_CALL_STACK);
5e75150c 109 regs[index] = value;
7657f4bf
SW
110}
111
#if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit VALUE into the register pair HIGH_INDEX:LOW_INDEX. */
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    uint32_t lo = (uint32_t)value;
    uint32_t hi = (uint32_t)(value >> 32);

    tci_write_reg(regs, low_index, lo);
    tci_write_reg(regs, high_index, hi);
}
#endif
120
#if TCG_TARGET_REG_BITS == 32
/* Combine two 32-bit halves into one 64-bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t result = (uint64_t)high << 32;

    /* The halves occupy disjoint bit ranges, so OR equals the old ADD. */
    return result | low;
}
#endif
128
129/* Read constant (native size) from bytecode. */
305daaed 130static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
7657f4bf 131{
305daaed 132 tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
7657f4bf
SW
133 *tb_ptr += sizeof(value);
134 return value;
135}
136
/* Read unsigned constant (32 bit) from bytecode.
 *
 * Use memcpy instead of the old "*(const uint32_t *)" cast: the bytecode
 * stream is not guaranteed to be 4-byte aligned, so the cast was an
 * unaligned, strict-aliasing-violating load (undefined behavior).
 */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
144
/* Read signed constant (32 bit) from bytecode.
 *
 * Use memcpy instead of the old "*(const int32_t *)" cast: the bytecode
 * stream is not guaranteed to be 4-byte aligned, so the cast was an
 * unaligned, strict-aliasing-violating load (undefined behavior).
 */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
152
7657f4bf
SW
#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode.
 *
 * memcpy instead of a "*(const uint64_t *)" cast: the bytecode stream is
 * not guaranteed to be 8-byte aligned, so the cast was an unaligned,
 * type-punned load (undefined behavior).
 */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
#endif
162
163/* Read indexed register (native size) from bytecode. */
5e75150c 164static tcg_target_ulong
305daaed 165tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
7657f4bf 166{
5e75150c 167 tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
7657f4bf
SW
168 *tb_ptr += 1;
169 return value;
170}
171
172/* Read indexed register (8 bit) from bytecode. */
305daaed 173static uint8_t tci_read_r8(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
7657f4bf 174{
5e75150c 175 uint8_t value = tci_read_reg8(regs, **tb_ptr);
7657f4bf
SW
176 *tb_ptr += 1;
177 return value;
178}
179
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    TCGReg r = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg8s(regs, r);
}
#endif
189
190/* Read indexed register (16 bit) from bytecode. */
305daaed
RH
191static uint16_t tci_read_r16(const tcg_target_ulong *regs,
192 const uint8_t **tb_ptr)
7657f4bf 193{
5e75150c 194 uint16_t value = tci_read_reg16(regs, **tb_ptr);
7657f4bf
SW
195 *tb_ptr += 1;
196 return value;
197}
198
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    TCGReg r = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg16s(regs, r);
}
#endif
209
210/* Read indexed register (32 bit) from bytecode. */
305daaed
RH
211static uint32_t tci_read_r32(const tcg_target_ulong *regs,
212 const uint8_t **tb_ptr)
7657f4bf 213{
5e75150c 214 uint32_t value = tci_read_reg32(regs, **tb_ptr);
7657f4bf
SW
215 *tb_ptr += 1;
216 return value;
217}
218
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode, low word first. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_r32(regs, tb_ptr);
    uint32_t hi = tci_read_r32(regs, tb_ptr);

    return tci_uint64(hi, lo);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    TCGReg r = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg32s(regs, r);
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    TCGReg r = **tb_ptr;

    *tb_ptr += 1;
    return tci_read_reg64(regs, r);
}
#endif
246
/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    /* Guest addresses wider than a host register are split across two
     * consecutive register operands: low half first, then high half. */
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
257
305daaed 258static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
7657f4bf 259{
c6c5063c 260 tcg_target_ulong label = tci_read_i(tb_ptr);
3ccdbecf 261 tci_assert(label != 0);
7657f4bf
SW
262 return label;
263}
264
265static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
266{
267 bool result = false;
268 int32_t i0 = u0;
269 int32_t i1 = u1;
270 switch (condition) {
271 case TCG_COND_EQ:
272 result = (u0 == u1);
273 break;
274 case TCG_COND_NE:
275 result = (u0 != u1);
276 break;
277 case TCG_COND_LT:
278 result = (i0 < i1);
279 break;
280 case TCG_COND_GE:
281 result = (i0 >= i1);
282 break;
283 case TCG_COND_LE:
284 result = (i0 <= i1);
285 break;
286 case TCG_COND_GT:
287 result = (i0 > i1);
288 break;
289 case TCG_COND_LTU:
290 result = (u0 < u1);
291 break;
292 case TCG_COND_GEU:
293 result = (u0 >= u1);
294 break;
295 case TCG_COND_LEU:
296 result = (u0 <= u1);
297 break;
298 case TCG_COND_GTU:
299 result = (u0 > u1);
300 break;
301 default:
f6996f99 302 g_assert_not_reached();
7657f4bf
SW
303 }
304 return result;
305}
306
307static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
308{
309 bool result = false;
310 int64_t i0 = u0;
311 int64_t i1 = u1;
312 switch (condition) {
313 case TCG_COND_EQ:
314 result = (u0 == u1);
315 break;
316 case TCG_COND_NE:
317 result = (u0 != u1);
318 break;
319 case TCG_COND_LT:
320 result = (i0 < i1);
321 break;
322 case TCG_COND_GE:
323 result = (i0 >= i1);
324 break;
325 case TCG_COND_LE:
326 result = (i0 <= i1);
327 break;
328 case TCG_COND_GT:
329 result = (i0 > i1);
330 break;
331 case TCG_COND_LTU:
332 result = (u0 < u1);
333 break;
334 case TCG_COND_GEU:
335 result = (u0 >= u1);
336 break;
337 case TCG_COND_LEU:
338 result = (u0 <= u1);
339 break;
340 case TCG_COND_GTU:
341 result = (u0 > u1);
342 break;
343 default:
f6996f99 344 g_assert_not_reached();
7657f4bf
SW
345 }
346 return result;
347}
348
/*
 * Guest memory access primitives for the qemu_ld/st opcode handlers.
 * These macros expand in a context where "taddr" (guest address) and
 * "tb_ptr" are in scope; the softmmu variants additionally use "env"
 * and "oi" (the TCGMemOpIdx) and pass the bytecode position as the
 * return address for fault unwinding.  The user-mode variants access
 * host memory directly through g2h().
 */
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
394
7f33f5cd
RH
/*
 * Expand to the case labels for an opcode that exists in both _i32 and
 * _i64 variants.  On 32-bit hosts only the _i32 label is emitted and
 * CASE_64 expands to nothing.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
    case glue(glue(INDEX_op_, x), _i64): \
    case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
    case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
    case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
406
7657f4bf 407/* Interpret pseudo code in tb. */
c905a368
DB
408/*
409 * Disable CFI checks.
410 * One possible operation in the pseudo code is a call to binary code.
411 * Therefore, disable CFI checks in the interpreter function
412 */
db0c51a3
RH
413uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
414 const void *v_tb_ptr)
7657f4bf 415{
305daaed 416 const uint8_t *tb_ptr = v_tb_ptr;
5e75150c 417 tcg_target_ulong regs[TCG_TARGET_NB_REGS];
ee79c356
RH
418 long tcg_temps[CPU_TEMP_BUF_NLONGS];
419 uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
819af24b 420 uintptr_t ret = 0;
7657f4bf 421
5e75150c
EC
422 regs[TCG_AREG0] = (tcg_target_ulong)env;
423 regs[TCG_REG_CALL_STACK] = sp_value;
3ccdbecf 424 tci_assert(tb_ptr);
7657f4bf
SW
425
426 for (;;) {
7657f4bf 427 TCGOpcode opc = tb_ptr[0];
3ccdbecf 428#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
7657f4bf 429 uint8_t op_size = tb_ptr[1];
305daaed 430 const uint8_t *old_code_ptr = tb_ptr;
7657f4bf
SW
431#endif
432 tcg_target_ulong t0;
433 tcg_target_ulong t1;
434 tcg_target_ulong t2;
435 tcg_target_ulong label;
436 TCGCond condition;
437 target_ulong taddr;
7657f4bf
SW
438 uint8_t tmp8;
439 uint16_t tmp16;
440 uint32_t tmp32;
441 uint64_t tmp64;
442#if TCG_TARGET_REG_BITS == 32
443 uint64_t v64;
444#endif
59227d5d 445 TCGMemOpIdx oi;
7657f4bf
SW
446
447 /* Skip opcode and size entry. */
448 tb_ptr += 2;
449
450 switch (opc) {
7657f4bf 451 case INDEX_op_call:
2f74f45e 452 t0 = tci_read_i(&tb_ptr);
13e71f08 453 tci_tb_ptr = (uintptr_t)tb_ptr;
7657f4bf 454#if TCG_TARGET_REG_BITS == 32
5e75150c
EC
455 tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
456 tci_read_reg(regs, TCG_REG_R1),
457 tci_read_reg(regs, TCG_REG_R2),
458 tci_read_reg(regs, TCG_REG_R3),
552672ba 459 tci_read_reg(regs, TCG_REG_R4),
5e75150c
EC
460 tci_read_reg(regs, TCG_REG_R5),
461 tci_read_reg(regs, TCG_REG_R6),
462 tci_read_reg(regs, TCG_REG_R7),
463 tci_read_reg(regs, TCG_REG_R8),
464 tci_read_reg(regs, TCG_REG_R9),
1df3caa9 465 tci_read_reg(regs, TCG_REG_R10),
552672ba 466 tci_read_reg(regs, TCG_REG_R11));
5e75150c
EC
467 tci_write_reg(regs, TCG_REG_R0, tmp64);
468 tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
7657f4bf 469#else
5e75150c
EC
470 tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
471 tci_read_reg(regs, TCG_REG_R1),
472 tci_read_reg(regs, TCG_REG_R2),
473 tci_read_reg(regs, TCG_REG_R3),
552672ba
RH
474 tci_read_reg(regs, TCG_REG_R4),
475 tci_read_reg(regs, TCG_REG_R5));
5e75150c 476 tci_write_reg(regs, TCG_REG_R0, tmp64);
7657f4bf
SW
477#endif
478 break;
7657f4bf
SW
479 case INDEX_op_br:
480 label = tci_read_label(&tb_ptr);
3ccdbecf 481 tci_assert(tb_ptr == old_code_ptr + op_size);
7657f4bf
SW
482 tb_ptr = (uint8_t *)label;
483 continue;
484 case INDEX_op_setcond_i32:
485 t0 = *tb_ptr++;
5e75150c 486 t1 = tci_read_r32(regs, &tb_ptr);
2f74f45e 487 t2 = tci_read_r32(regs, &tb_ptr);
7657f4bf 488 condition = *tb_ptr++;
85bbbf70 489 tci_write_reg(regs, t0, tci_compare32(t1, t2, condition));
7657f4bf
SW
490 break;
491#if TCG_TARGET_REG_BITS == 32
492 case INDEX_op_setcond2_i32:
493 t0 = *tb_ptr++;
5e75150c 494 tmp64 = tci_read_r64(regs, &tb_ptr);
2f74f45e 495 v64 = tci_read_r64(regs, &tb_ptr);
7657f4bf 496 condition = *tb_ptr++;
85bbbf70 497 tci_write_reg(regs, t0, tci_compare64(tmp64, v64, condition));
7657f4bf
SW
498 break;
499#elif TCG_TARGET_REG_BITS == 64
500 case INDEX_op_setcond_i64:
501 t0 = *tb_ptr++;
5e75150c 502 t1 = tci_read_r64(regs, &tb_ptr);
2f74f45e 503 t2 = tci_read_r64(regs, &tb_ptr);
7657f4bf 504 condition = *tb_ptr++;
5410e434 505 tci_write_reg(regs, t0, tci_compare64(t1, t2, condition));
7657f4bf
SW
506 break;
507#endif
508 case INDEX_op_mov_i32:
509 t0 = *tb_ptr++;
5e75150c 510 t1 = tci_read_r32(regs, &tb_ptr);
85bbbf70 511 tci_write_reg(regs, t0, t1);
7657f4bf 512 break;
1bd1af98 513 case INDEX_op_tci_movi_i32:
7657f4bf
SW
514 t0 = *tb_ptr++;
515 t1 = tci_read_i32(&tb_ptr);
85bbbf70 516 tci_write_reg(regs, t0, t1);
7657f4bf
SW
517 break;
518
519 /* Load/store operations (32 bit). */
520
7f33f5cd 521 CASE_32_64(ld8u)
7657f4bf 522 t0 = *tb_ptr++;
5e75150c 523 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 524 t2 = tci_read_s32(&tb_ptr);
475a1561 525 tci_write_reg(regs, t0, *(uint8_t *)(t1 + t2));
7657f4bf 526 break;
850163eb
RH
527 CASE_32_64(ld8s)
528 t0 = *tb_ptr++;
529 t1 = tci_read_r(regs, &tb_ptr);
530 t2 = tci_read_s32(&tb_ptr);
531 tci_write_reg(regs, t0, *(int8_t *)(t1 + t2));
2f160e0f 532 break;
77c38c7c
RH
533 CASE_32_64(ld16u)
534 t0 = *tb_ptr++;
535 t1 = tci_read_r(regs, &tb_ptr);
536 t2 = tci_read_s32(&tb_ptr);
537 tci_write_reg(regs, t0, *(uint16_t *)(t1 + t2));
7657f4bf 538 break;
b09d78bf 539 CASE_32_64(ld16s)
49a5a75f
SW
540 t0 = *tb_ptr++;
541 t1 = tci_read_r(regs, &tb_ptr);
542 t2 = tci_read_s32(&tb_ptr);
543 tci_write_reg(regs, t0, *(int16_t *)(t1 + t2));
7657f4bf
SW
544 break;
545 case INDEX_op_ld_i32:
c1d77e94 546 CASE_64(ld32u)
7657f4bf 547 t0 = *tb_ptr++;
5e75150c 548 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 549 t2 = tci_read_s32(&tb_ptr);
85bbbf70 550 tci_write_reg(regs, t0, *(uint32_t *)(t1 + t2));
7657f4bf 551 break;
ba9a80c1 552 CASE_32_64(st8)
5e75150c
EC
553 t0 = tci_read_r8(regs, &tb_ptr);
554 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 555 t2 = tci_read_s32(&tb_ptr);
7657f4bf
SW
556 *(uint8_t *)(t1 + t2) = t0;
557 break;
90be4dde 558 CASE_32_64(st16)
5e75150c
EC
559 t0 = tci_read_r16(regs, &tb_ptr);
560 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 561 t2 = tci_read_s32(&tb_ptr);
7657f4bf
SW
562 *(uint16_t *)(t1 + t2) = t0;
563 break;
564 case INDEX_op_st_i32:
b4d5bf0f 565 CASE_64(st32)
5e75150c
EC
566 t0 = tci_read_r32(regs, &tb_ptr);
567 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 568 t2 = tci_read_s32(&tb_ptr);
7657f4bf
SW
569 *(uint32_t *)(t1 + t2) = t0;
570 break;
571
572 /* Arithmetic operations (32 bit). */
573
574 case INDEX_op_add_i32:
575 t0 = *tb_ptr++;
2f74f45e
RH
576 t1 = tci_read_r32(regs, &tb_ptr);
577 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 578 tci_write_reg(regs, t0, t1 + t2);
7657f4bf
SW
579 break;
580 case INDEX_op_sub_i32:
581 t0 = *tb_ptr++;
2f74f45e
RH
582 t1 = tci_read_r32(regs, &tb_ptr);
583 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 584 tci_write_reg(regs, t0, t1 - t2);
7657f4bf
SW
585 break;
586 case INDEX_op_mul_i32:
587 t0 = *tb_ptr++;
2f74f45e
RH
588 t1 = tci_read_r32(regs, &tb_ptr);
589 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 590 tci_write_reg(regs, t0, t1 * t2);
7657f4bf 591 break;
7657f4bf
SW
592 case INDEX_op_div_i32:
593 t0 = *tb_ptr++;
2f74f45e
RH
594 t1 = tci_read_r32(regs, &tb_ptr);
595 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 596 tci_write_reg(regs, t0, (int32_t)t1 / (int32_t)t2);
7657f4bf
SW
597 break;
598 case INDEX_op_divu_i32:
599 t0 = *tb_ptr++;
2f74f45e
RH
600 t1 = tci_read_r32(regs, &tb_ptr);
601 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 602 tci_write_reg(regs, t0, t1 / t2);
7657f4bf
SW
603 break;
604 case INDEX_op_rem_i32:
605 t0 = *tb_ptr++;
2f74f45e
RH
606 t1 = tci_read_r32(regs, &tb_ptr);
607 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 608 tci_write_reg(regs, t0, (int32_t)t1 % (int32_t)t2);
7657f4bf
SW
609 break;
610 case INDEX_op_remu_i32:
611 t0 = *tb_ptr++;
2f74f45e
RH
612 t1 = tci_read_r32(regs, &tb_ptr);
613 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 614 tci_write_reg(regs, t0, t1 % t2);
7657f4bf 615 break;
7657f4bf
SW
616 case INDEX_op_and_i32:
617 t0 = *tb_ptr++;
2f74f45e
RH
618 t1 = tci_read_r32(regs, &tb_ptr);
619 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 620 tci_write_reg(regs, t0, t1 & t2);
7657f4bf
SW
621 break;
622 case INDEX_op_or_i32:
623 t0 = *tb_ptr++;
2f74f45e
RH
624 t1 = tci_read_r32(regs, &tb_ptr);
625 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 626 tci_write_reg(regs, t0, t1 | t2);
7657f4bf
SW
627 break;
628 case INDEX_op_xor_i32:
629 t0 = *tb_ptr++;
2f74f45e
RH
630 t1 = tci_read_r32(regs, &tb_ptr);
631 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 632 tci_write_reg(regs, t0, t1 ^ t2);
7657f4bf
SW
633 break;
634
635 /* Shift/rotate operations (32 bit). */
636
637 case INDEX_op_shl_i32:
638 t0 = *tb_ptr++;
2f74f45e
RH
639 t1 = tci_read_r32(regs, &tb_ptr);
640 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 641 tci_write_reg(regs, t0, t1 << (t2 & 31));
7657f4bf
SW
642 break;
643 case INDEX_op_shr_i32:
644 t0 = *tb_ptr++;
2f74f45e
RH
645 t1 = tci_read_r32(regs, &tb_ptr);
646 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 647 tci_write_reg(regs, t0, t1 >> (t2 & 31));
7657f4bf
SW
648 break;
649 case INDEX_op_sar_i32:
650 t0 = *tb_ptr++;
2f74f45e
RH
651 t1 = tci_read_r32(regs, &tb_ptr);
652 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 653 tci_write_reg(regs, t0, ((int32_t)t1 >> (t2 & 31)));
7657f4bf
SW
654 break;
655#if TCG_TARGET_HAS_rot_i32
656 case INDEX_op_rotl_i32:
657 t0 = *tb_ptr++;
2f74f45e
RH
658 t1 = tci_read_r32(regs, &tb_ptr);
659 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 660 tci_write_reg(regs, t0, rol32(t1, t2 & 31));
7657f4bf
SW
661 break;
662 case INDEX_op_rotr_i32:
663 t0 = *tb_ptr++;
2f74f45e
RH
664 t1 = tci_read_r32(regs, &tb_ptr);
665 t2 = tci_read_r32(regs, &tb_ptr);
85bbbf70 666 tci_write_reg(regs, t0, ror32(t1, t2 & 31));
7657f4bf 667 break;
e24dc9fe
SW
668#endif
669#if TCG_TARGET_HAS_deposit_i32
670 case INDEX_op_deposit_i32:
671 t0 = *tb_ptr++;
5e75150c
EC
672 t1 = tci_read_r32(regs, &tb_ptr);
673 t2 = tci_read_r32(regs, &tb_ptr);
e24dc9fe
SW
674 tmp16 = *tb_ptr++;
675 tmp8 = *tb_ptr++;
676 tmp32 = (((1 << tmp8) - 1) << tmp16);
85bbbf70 677 tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
e24dc9fe 678 break;
7657f4bf
SW
679#endif
680 case INDEX_op_brcond_i32:
5e75150c 681 t0 = tci_read_r32(regs, &tb_ptr);
2f74f45e 682 t1 = tci_read_r32(regs, &tb_ptr);
7657f4bf
SW
683 condition = *tb_ptr++;
684 label = tci_read_label(&tb_ptr);
685 if (tci_compare32(t0, t1, condition)) {
3ccdbecf 686 tci_assert(tb_ptr == old_code_ptr + op_size);
7657f4bf
SW
687 tb_ptr = (uint8_t *)label;
688 continue;
689 }
690 break;
691#if TCG_TARGET_REG_BITS == 32
692 case INDEX_op_add2_i32:
693 t0 = *tb_ptr++;
694 t1 = *tb_ptr++;
5e75150c
EC
695 tmp64 = tci_read_r64(regs, &tb_ptr);
696 tmp64 += tci_read_r64(regs, &tb_ptr);
697 tci_write_reg64(regs, t1, t0, tmp64);
7657f4bf
SW
698 break;
699 case INDEX_op_sub2_i32:
700 t0 = *tb_ptr++;
701 t1 = *tb_ptr++;
5e75150c
EC
702 tmp64 = tci_read_r64(regs, &tb_ptr);
703 tmp64 -= tci_read_r64(regs, &tb_ptr);
704 tci_write_reg64(regs, t1, t0, tmp64);
7657f4bf
SW
705 break;
706 case INDEX_op_brcond2_i32:
5e75150c 707 tmp64 = tci_read_r64(regs, &tb_ptr);
2f74f45e 708 v64 = tci_read_r64(regs, &tb_ptr);
7657f4bf
SW
709 condition = *tb_ptr++;
710 label = tci_read_label(&tb_ptr);
711 if (tci_compare64(tmp64, v64, condition)) {
3ccdbecf 712 tci_assert(tb_ptr == old_code_ptr + op_size);
7657f4bf
SW
713 tb_ptr = (uint8_t *)label;
714 continue;
715 }
716 break;
717 case INDEX_op_mulu2_i32:
718 t0 = *tb_ptr++;
719 t1 = *tb_ptr++;
5e75150c
EC
720 t2 = tci_read_r32(regs, &tb_ptr);
721 tmp64 = tci_read_r32(regs, &tb_ptr);
722 tci_write_reg64(regs, t1, t0, t2 * tmp64);
7657f4bf
SW
723 break;
724#endif /* TCG_TARGET_REG_BITS == 32 */
725#if TCG_TARGET_HAS_ext8s_i32
726 case INDEX_op_ext8s_i32:
727 t0 = *tb_ptr++;
5e75150c 728 t1 = tci_read_r8s(regs, &tb_ptr);
85bbbf70 729 tci_write_reg(regs, t0, t1);
7657f4bf
SW
730 break;
731#endif
732#if TCG_TARGET_HAS_ext16s_i32
733 case INDEX_op_ext16s_i32:
734 t0 = *tb_ptr++;
5e75150c 735 t1 = tci_read_r16s(regs, &tb_ptr);
85bbbf70 736 tci_write_reg(regs, t0, t1);
7657f4bf
SW
737 break;
738#endif
739#if TCG_TARGET_HAS_ext8u_i32
740 case INDEX_op_ext8u_i32:
741 t0 = *tb_ptr++;
5e75150c 742 t1 = tci_read_r8(regs, &tb_ptr);
85bbbf70 743 tci_write_reg(regs, t0, t1);
7657f4bf
SW
744 break;
745#endif
746#if TCG_TARGET_HAS_ext16u_i32
747 case INDEX_op_ext16u_i32:
748 t0 = *tb_ptr++;
5e75150c 749 t1 = tci_read_r16(regs, &tb_ptr);
85bbbf70 750 tci_write_reg(regs, t0, t1);
7657f4bf
SW
751 break;
752#endif
753#if TCG_TARGET_HAS_bswap16_i32
754 case INDEX_op_bswap16_i32:
755 t0 = *tb_ptr++;
5e75150c 756 t1 = tci_read_r16(regs, &tb_ptr);
85bbbf70 757 tci_write_reg(regs, t0, bswap16(t1));
7657f4bf
SW
758 break;
759#endif
760#if TCG_TARGET_HAS_bswap32_i32
761 case INDEX_op_bswap32_i32:
762 t0 = *tb_ptr++;
5e75150c 763 t1 = tci_read_r32(regs, &tb_ptr);
85bbbf70 764 tci_write_reg(regs, t0, bswap32(t1));
7657f4bf
SW
765 break;
766#endif
767#if TCG_TARGET_HAS_not_i32
768 case INDEX_op_not_i32:
769 t0 = *tb_ptr++;
5e75150c 770 t1 = tci_read_r32(regs, &tb_ptr);
85bbbf70 771 tci_write_reg(regs, t0, ~t1);
7657f4bf
SW
772 break;
773#endif
774#if TCG_TARGET_HAS_neg_i32
775 case INDEX_op_neg_i32:
776 t0 = *tb_ptr++;
5e75150c 777 t1 = tci_read_r32(regs, &tb_ptr);
85bbbf70 778 tci_write_reg(regs, t0, -t1);
7657f4bf
SW
779 break;
780#endif
781#if TCG_TARGET_REG_BITS == 64
782 case INDEX_op_mov_i64:
783 t0 = *tb_ptr++;
5e75150c 784 t1 = tci_read_r64(regs, &tb_ptr);
5410e434 785 tci_write_reg(regs, t0, t1);
7657f4bf 786 break;
1bd1af98 787 case INDEX_op_tci_movi_i64:
7657f4bf
SW
788 t0 = *tb_ptr++;
789 t1 = tci_read_i64(&tb_ptr);
5410e434 790 tci_write_reg(regs, t0, t1);
7657f4bf
SW
791 break;
792
793 /* Load/store operations (64 bit). */
794
7657f4bf
SW
795 case INDEX_op_ld32s_i64:
796 t0 = *tb_ptr++;
5e75150c 797 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 798 t2 = tci_read_s32(&tb_ptr);
9592e897 799 tci_write_reg(regs, t0, *(int32_t *)(t1 + t2));
7657f4bf
SW
800 break;
801 case INDEX_op_ld_i64:
802 t0 = *tb_ptr++;
5e75150c 803 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 804 t2 = tci_read_s32(&tb_ptr);
5410e434 805 tci_write_reg(regs, t0, *(uint64_t *)(t1 + t2));
7657f4bf 806 break;
7657f4bf 807 case INDEX_op_st_i64:
5e75150c
EC
808 t0 = tci_read_r64(regs, &tb_ptr);
809 t1 = tci_read_r(regs, &tb_ptr);
03fc0548 810 t2 = tci_read_s32(&tb_ptr);
7657f4bf
SW
811 *(uint64_t *)(t1 + t2) = t0;
812 break;
813
814 /* Arithmetic operations (64 bit). */
815
816 case INDEX_op_add_i64:
817 t0 = *tb_ptr++;
2f74f45e
RH
818 t1 = tci_read_r64(regs, &tb_ptr);
819 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 820 tci_write_reg(regs, t0, t1 + t2);
7657f4bf
SW
821 break;
822 case INDEX_op_sub_i64:
823 t0 = *tb_ptr++;
2f74f45e
RH
824 t1 = tci_read_r64(regs, &tb_ptr);
825 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 826 tci_write_reg(regs, t0, t1 - t2);
7657f4bf
SW
827 break;
828 case INDEX_op_mul_i64:
829 t0 = *tb_ptr++;
2f74f45e
RH
830 t1 = tci_read_r64(regs, &tb_ptr);
831 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 832 tci_write_reg(regs, t0, t1 * t2);
7657f4bf 833 break;
7657f4bf 834 case INDEX_op_div_i64:
ae40c098 835 t0 = *tb_ptr++;
2f74f45e
RH
836 t1 = tci_read_r64(regs, &tb_ptr);
837 t2 = tci_read_r64(regs, &tb_ptr);
ae40c098
RH
838 tci_write_reg(regs, t0, (int64_t)t1 / (int64_t)t2);
839 break;
7657f4bf 840 case INDEX_op_divu_i64:
ae40c098 841 t0 = *tb_ptr++;
2f74f45e
RH
842 t1 = tci_read_r64(regs, &tb_ptr);
843 t2 = tci_read_r64(regs, &tb_ptr);
ae40c098
RH
844 tci_write_reg(regs, t0, (uint64_t)t1 / (uint64_t)t2);
845 break;
7657f4bf 846 case INDEX_op_rem_i64:
ae40c098 847 t0 = *tb_ptr++;
2f74f45e
RH
848 t1 = tci_read_r64(regs, &tb_ptr);
849 t2 = tci_read_r64(regs, &tb_ptr);
ae40c098
RH
850 tci_write_reg(regs, t0, (int64_t)t1 % (int64_t)t2);
851 break;
7657f4bf 852 case INDEX_op_remu_i64:
ae40c098 853 t0 = *tb_ptr++;
2f74f45e
RH
854 t1 = tci_read_r64(regs, &tb_ptr);
855 t2 = tci_read_r64(regs, &tb_ptr);
ae40c098 856 tci_write_reg(regs, t0, (uint64_t)t1 % (uint64_t)t2);
7657f4bf 857 break;
7657f4bf
SW
858 case INDEX_op_and_i64:
859 t0 = *tb_ptr++;
2f74f45e
RH
860 t1 = tci_read_r64(regs, &tb_ptr);
861 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 862 tci_write_reg(regs, t0, t1 & t2);
7657f4bf
SW
863 break;
864 case INDEX_op_or_i64:
865 t0 = *tb_ptr++;
2f74f45e
RH
866 t1 = tci_read_r64(regs, &tb_ptr);
867 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 868 tci_write_reg(regs, t0, t1 | t2);
7657f4bf
SW
869 break;
870 case INDEX_op_xor_i64:
871 t0 = *tb_ptr++;
2f74f45e
RH
872 t1 = tci_read_r64(regs, &tb_ptr);
873 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 874 tci_write_reg(regs, t0, t1 ^ t2);
7657f4bf
SW
875 break;
876
877 /* Shift/rotate operations (64 bit). */
878
879 case INDEX_op_shl_i64:
880 t0 = *tb_ptr++;
2f74f45e
RH
881 t1 = tci_read_r64(regs, &tb_ptr);
882 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 883 tci_write_reg(regs, t0, t1 << (t2 & 63));
7657f4bf
SW
884 break;
885 case INDEX_op_shr_i64:
886 t0 = *tb_ptr++;
2f74f45e
RH
887 t1 = tci_read_r64(regs, &tb_ptr);
888 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 889 tci_write_reg(regs, t0, t1 >> (t2 & 63));
7657f4bf
SW
890 break;
891 case INDEX_op_sar_i64:
892 t0 = *tb_ptr++;
2f74f45e
RH
893 t1 = tci_read_r64(regs, &tb_ptr);
894 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 895 tci_write_reg(regs, t0, ((int64_t)t1 >> (t2 & 63)));
7657f4bf
SW
896 break;
897#if TCG_TARGET_HAS_rot_i64
898 case INDEX_op_rotl_i64:
d285bf78 899 t0 = *tb_ptr++;
2f74f45e
RH
900 t1 = tci_read_r64(regs, &tb_ptr);
901 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 902 tci_write_reg(regs, t0, rol64(t1, t2 & 63));
d285bf78 903 break;
7657f4bf 904 case INDEX_op_rotr_i64:
d285bf78 905 t0 = *tb_ptr++;
2f74f45e
RH
906 t1 = tci_read_r64(regs, &tb_ptr);
907 t2 = tci_read_r64(regs, &tb_ptr);
5410e434 908 tci_write_reg(regs, t0, ror64(t1, t2 & 63));
7657f4bf 909 break;
e24dc9fe
SW
910#endif
911#if TCG_TARGET_HAS_deposit_i64
912 case INDEX_op_deposit_i64:
913 t0 = *tb_ptr++;
5e75150c
EC
914 t1 = tci_read_r64(regs, &tb_ptr);
915 t2 = tci_read_r64(regs, &tb_ptr);
e24dc9fe
SW
916 tmp16 = *tb_ptr++;
917 tmp8 = *tb_ptr++;
918 tmp64 = (((1ULL << tmp8) - 1) << tmp16);
5410e434 919 tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
e24dc9fe 920 break;
7657f4bf
SW
921#endif
922 case INDEX_op_brcond_i64:
5e75150c 923 t0 = tci_read_r64(regs, &tb_ptr);
2f74f45e 924 t1 = tci_read_r64(regs, &tb_ptr);
7657f4bf
SW
925 condition = *tb_ptr++;
926 label = tci_read_label(&tb_ptr);
927 if (tci_compare64(t0, t1, condition)) {
3ccdbecf 928 tci_assert(tb_ptr == old_code_ptr + op_size);
7657f4bf
SW
929 tb_ptr = (uint8_t *)label;
930 continue;
931 }
932 break;
933#if TCG_TARGET_HAS_ext8u_i64
934 case INDEX_op_ext8u_i64:
935 t0 = *tb_ptr++;
5e75150c 936 t1 = tci_read_r8(regs, &tb_ptr);
5410e434 937 tci_write_reg(regs, t0, t1);
7657f4bf
SW
938 break;
939#endif
940#if TCG_TARGET_HAS_ext8s_i64
941 case INDEX_op_ext8s_i64:
942 t0 = *tb_ptr++;
5e75150c 943 t1 = tci_read_r8s(regs, &tb_ptr);
5410e434 944 tci_write_reg(regs, t0, t1);
7657f4bf
SW
945 break;
946#endif
947#if TCG_TARGET_HAS_ext16s_i64
948 case INDEX_op_ext16s_i64:
949 t0 = *tb_ptr++;
5e75150c 950 t1 = tci_read_r16s(regs, &tb_ptr);
5410e434 951 tci_write_reg(regs, t0, t1);
7657f4bf
SW
952 break;
953#endif
954#if TCG_TARGET_HAS_ext16u_i64
955 case INDEX_op_ext16u_i64:
956 t0 = *tb_ptr++;
5e75150c 957 t1 = tci_read_r16(regs, &tb_ptr);
5410e434 958 tci_write_reg(regs, t0, t1);
7657f4bf
SW
959 break;
960#endif
961#if TCG_TARGET_HAS_ext32s_i64
962 case INDEX_op_ext32s_i64:
4f2331e5
AJ
963#endif
964 case INDEX_op_ext_i32_i64:
7657f4bf 965 t0 = *tb_ptr++;
5e75150c 966 t1 = tci_read_r32s(regs, &tb_ptr);
5410e434 967 tci_write_reg(regs, t0, t1);
7657f4bf 968 break;
7657f4bf
SW
969#if TCG_TARGET_HAS_ext32u_i64
970 case INDEX_op_ext32u_i64:
4f2331e5
AJ
971#endif
972 case INDEX_op_extu_i32_i64:
7657f4bf 973 t0 = *tb_ptr++;
5e75150c 974 t1 = tci_read_r32(regs, &tb_ptr);
5410e434 975 tci_write_reg(regs, t0, t1);
7657f4bf 976 break;
7657f4bf
SW
977#if TCG_TARGET_HAS_bswap16_i64
978 case INDEX_op_bswap16_i64:
7657f4bf 979 t0 = *tb_ptr++;
5e75150c 980 t1 = tci_read_r16(regs, &tb_ptr);
5410e434 981 tci_write_reg(regs, t0, bswap16(t1));
7657f4bf
SW
982 break;
983#endif
984#if TCG_TARGET_HAS_bswap32_i64
985 case INDEX_op_bswap32_i64:
986 t0 = *tb_ptr++;
5e75150c 987 t1 = tci_read_r32(regs, &tb_ptr);
5410e434 988 tci_write_reg(regs, t0, bswap32(t1));
7657f4bf
SW
989 break;
990#endif
991#if TCG_TARGET_HAS_bswap64_i64
992 case INDEX_op_bswap64_i64:
7657f4bf 993 t0 = *tb_ptr++;
5e75150c 994 t1 = tci_read_r64(regs, &tb_ptr);
5410e434 995 tci_write_reg(regs, t0, bswap64(t1));
7657f4bf
SW
996 break;
997#endif
998#if TCG_TARGET_HAS_not_i64
999 case INDEX_op_not_i64:
1000 t0 = *tb_ptr++;
5e75150c 1001 t1 = tci_read_r64(regs, &tb_ptr);
5410e434 1002 tci_write_reg(regs, t0, ~t1);
7657f4bf
SW
1003 break;
1004#endif
1005#if TCG_TARGET_HAS_neg_i64
1006 case INDEX_op_neg_i64:
1007 t0 = *tb_ptr++;
5e75150c 1008 t1 = tci_read_r64(regs, &tb_ptr);
5410e434 1009 tci_write_reg(regs, t0, -t1);
7657f4bf
SW
1010 break;
1011#endif
1012#endif /* TCG_TARGET_REG_BITS == 64 */
1013
1014 /* QEMU specific operations. */
1015
7657f4bf 1016 case INDEX_op_exit_tb:
819af24b 1017 ret = *(uint64_t *)tb_ptr;
7657f4bf
SW
1018 goto exit;
1019 break;
1020 case INDEX_op_goto_tb:
76442a93
SF
1021 /* Jump address is aligned */
1022 tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
d73415a3 1023 t0 = qatomic_read((int32_t *)tb_ptr);
76442a93 1024 tb_ptr += sizeof(int32_t);
3ccdbecf 1025 tci_assert(tb_ptr == old_code_ptr + op_size);
7657f4bf
SW
1026 tb_ptr += (int32_t)t0;
1027 continue;
76782fab 1028 case INDEX_op_qemu_ld_i32:
7657f4bf 1029 t0 = *tb_ptr++;
5e75150c 1030 taddr = tci_read_ulong(regs, &tb_ptr);
59227d5d 1031 oi = tci_read_i(&tb_ptr);
2b7ec66f 1032 switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
76782fab
RH
1033 case MO_UB:
1034 tmp32 = qemu_ld_ub;
1035 break;
1036 case MO_SB:
1037 tmp32 = (int8_t)qemu_ld_ub;
1038 break;
1039 case MO_LEUW:
1040 tmp32 = qemu_ld_leuw;
1041 break;
1042 case MO_LESW:
1043 tmp32 = (int16_t)qemu_ld_leuw;
1044 break;
1045 case MO_LEUL:
1046 tmp32 = qemu_ld_leul;
1047 break;
1048 case MO_BEUW:
1049 tmp32 = qemu_ld_beuw;
1050 break;
1051 case MO_BESW:
1052 tmp32 = (int16_t)qemu_ld_beuw;
1053 break;
1054 case MO_BEUL:
1055 tmp32 = qemu_ld_beul;
1056 break;
1057 default:
f6996f99 1058 g_assert_not_reached();
76782fab 1059 }
5e75150c 1060 tci_write_reg(regs, t0, tmp32);
7657f4bf 1061 break;
76782fab 1062 case INDEX_op_qemu_ld_i64:
7657f4bf 1063 t0 = *tb_ptr++;
76782fab
RH
1064 if (TCG_TARGET_REG_BITS == 32) {
1065 t1 = *tb_ptr++;
1066 }
5e75150c 1067 taddr = tci_read_ulong(regs, &tb_ptr);
59227d5d 1068 oi = tci_read_i(&tb_ptr);
2b7ec66f 1069 switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
76782fab
RH
1070 case MO_UB:
1071 tmp64 = qemu_ld_ub;
1072 break;
1073 case MO_SB:
1074 tmp64 = (int8_t)qemu_ld_ub;
1075 break;
1076 case MO_LEUW:
1077 tmp64 = qemu_ld_leuw;
1078 break;
1079 case MO_LESW:
1080 tmp64 = (int16_t)qemu_ld_leuw;
1081 break;
1082 case MO_LEUL:
1083 tmp64 = qemu_ld_leul;
1084 break;
1085 case MO_LESL:
1086 tmp64 = (int32_t)qemu_ld_leul;
1087 break;
1088 case MO_LEQ:
1089 tmp64 = qemu_ld_leq;
1090 break;
1091 case MO_BEUW:
1092 tmp64 = qemu_ld_beuw;
1093 break;
1094 case MO_BESW:
1095 tmp64 = (int16_t)qemu_ld_beuw;
1096 break;
1097 case MO_BEUL:
1098 tmp64 = qemu_ld_beul;
1099 break;
1100 case MO_BESL:
1101 tmp64 = (int32_t)qemu_ld_beul;
1102 break;
1103 case MO_BEQ:
1104 tmp64 = qemu_ld_beq;
1105 break;
1106 default:
f6996f99 1107 g_assert_not_reached();
76782fab 1108 }
5e75150c 1109 tci_write_reg(regs, t0, tmp64);
76782fab 1110 if (TCG_TARGET_REG_BITS == 32) {
5e75150c 1111 tci_write_reg(regs, t1, tmp64 >> 32);
76782fab 1112 }
7657f4bf 1113 break;
76782fab 1114 case INDEX_op_qemu_st_i32:
5e75150c
EC
1115 t0 = tci_read_r(regs, &tb_ptr);
1116 taddr = tci_read_ulong(regs, &tb_ptr);
59227d5d 1117 oi = tci_read_i(&tb_ptr);
2b7ec66f 1118 switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
76782fab
RH
1119 case MO_UB:
1120 qemu_st_b(t0);
1121 break;
1122 case MO_LEUW:
1123 qemu_st_lew(t0);
1124 break;
1125 case MO_LEUL:
1126 qemu_st_lel(t0);
1127 break;
1128 case MO_BEUW:
1129 qemu_st_bew(t0);
1130 break;
1131 case MO_BEUL:
1132 qemu_st_bel(t0);
1133 break;
1134 default:
f6996f99 1135 g_assert_not_reached();
76782fab 1136 }
7657f4bf 1137 break;
76782fab 1138 case INDEX_op_qemu_st_i64:
5e75150c
EC
1139 tmp64 = tci_read_r64(regs, &tb_ptr);
1140 taddr = tci_read_ulong(regs, &tb_ptr);
59227d5d 1141 oi = tci_read_i(&tb_ptr);
2b7ec66f 1142 switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
76782fab
RH
1143 case MO_UB:
1144 qemu_st_b(tmp64);
1145 break;
1146 case MO_LEUW:
1147 qemu_st_lew(tmp64);
1148 break;
1149 case MO_LEUL:
1150 qemu_st_lel(tmp64);
1151 break;
1152 case MO_LEQ:
1153 qemu_st_leq(tmp64);
1154 break;
1155 case MO_BEUW:
1156 qemu_st_bew(tmp64);
1157 break;
1158 case MO_BEUL:
1159 qemu_st_bel(tmp64);
1160 break;
1161 case MO_BEQ:
1162 qemu_st_beq(tmp64);
1163 break;
1164 default:
f6996f99 1165 g_assert_not_reached();
76782fab 1166 }
7657f4bf 1167 break;
a1e69e2f
PK
1168 case INDEX_op_mb:
1169 /* Ensure ordering for all kinds */
1170 smp_mb();
1171 break;
7657f4bf 1172 default:
f6996f99 1173 g_assert_not_reached();
7657f4bf 1174 }
3ccdbecf 1175 tci_assert(tb_ptr == old_code_ptr + op_size);
7657f4bf
SW
1176 }
1177exit:
819af24b 1178 return ret;
7657f4bf 1179}