]>
Commit | Line | Data |
---|---|---|
7657f4bf SW |
1 | /* |
2 | * Tiny Code Interpreter for QEMU | |
3 | * | |
3ccdbecf | 4 | * Copyright (c) 2009, 2011, 2016 Stefan Weil |
7657f4bf SW |
5 | * |
6 | * This program is free software: you can redistribute it and/or modify | |
7 | * it under the terms of the GNU General Public License as published by | |
8 | * the Free Software Foundation, either version 2 of the License, or | |
9 | * (at your option) any later version. | |
10 | * | |
11 | * This program is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | * GNU General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU General Public License | |
17 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
18 | */ | |
19 | ||
d38ea87a | 20 | #include "qemu/osdep.h" |
7657f4bf | 21 | |
3ccdbecf SW |
22 | /* Enable TCI assertions only when debugging TCG (and without NDEBUG defined). |
23 | * Without assertions, the interpreter runs much faster. */ | |
24 | #if defined(CONFIG_DEBUG_TCG) | |
25 | # define tci_assert(cond) assert(cond) | |
26 | #else | |
27 | # define tci_assert(cond) ((void)0) | |
7657f4bf SW |
28 | #endif |
29 | ||
#include <string.h>          /* memcpy: unaligned-safe bytecode reads */

#include "qemu-common.h"
#include "exec/exec-all.h"   /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"
34 | ||
35 | /* Marker for missing code. */ | |
36 | #define TODO() \ | |
37 | do { \ | |
38 | fprintf(stderr, "TODO %s:%u: %s()\n", \ | |
39 | __FILE__, __LINE__, __func__); \ | |
40 | tcg_abort(); \ | |
41 | } while (0) | |
42 | ||
6673f47d | 43 | #if MAX_OPC_PARAM_IARGS != 5 |
7657f4bf SW |
44 | # error Fix needed, number of supported input arguments changed! |
45 | #endif | |
46 | #if TCG_TARGET_REG_BITS == 32 | |
47 | typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong, | |
6673f47d | 48 | tcg_target_ulong, tcg_target_ulong, |
7657f4bf SW |
49 | tcg_target_ulong, tcg_target_ulong, |
50 | tcg_target_ulong, tcg_target_ulong, | |
51 | tcg_target_ulong, tcg_target_ulong); | |
52 | #else | |
53 | typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong, | |
6673f47d SW |
54 | tcg_target_ulong, tcg_target_ulong, |
55 | tcg_target_ulong); | |
7657f4bf SW |
56 | #endif |
57 | ||
7657f4bf SW |
58 | static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS]; |
59 | ||
/* Return the current value of interpreter register 'index'. */
static tcg_target_ulong tci_read_reg(TCGReg index)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}
65 | ||
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read register, sign-extended from its low 8 bits (ext8s opcodes). */
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read register, sign-extended from its low 16 bits (ext16s opcodes). */
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
/* Read register, sign-extended from its low 32 bits (64-bit hosts only). */
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

/* Read register, truncated to its low 8 bits. */
static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

/* Read register, truncated to its low 16 bits. */
static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

/* Read register, truncated to its low 32 bits. */
static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
/* Read the full 64-bit register value (64-bit hosts only). */
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif
108 | ||
/* Store 'value' into interpreter register 'index'.
 *
 * TCG_AREG0 (the CPU env pointer) and TCG_REG_CALL_STACK are managed by
 * the interpreter itself and must never appear as ordinary bytecode
 * destinations; the asserts catch malformed bytecode early.
 */
static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}
116 | ||
#if TCG_TARGET_REG_BITS == 64
/* Store a 32-bit value, sign-extended to register width (64-bit hosts). */
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

/* Store an 8-bit value, zero-extended to register width. */
static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

/* Store a 32-bit value, zero-extended to register width. */
static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit value into a pair of 32-bit registers (low, then high). */
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
/* Store a full 64-bit value into a register. */
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif
147 | ||
148 | #if TCG_TARGET_REG_BITS == 32 | |
/* Combine two 32-bit halves into one 64-bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    /* The shifted high half has no low bits set, so OR == ADD here. */
    return ((uint64_t)high << 32) | low;
}
154 | #endif | |
155 | ||
156 | /* Read constant (native size) from bytecode. */ | |
157 | static tcg_target_ulong tci_read_i(uint8_t **tb_ptr) | |
158 | { | |
159 | tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr); | |
160 | *tb_ptr += sizeof(value); | |
161 | return value; | |
162 | } | |
163 | ||
/* Read unsigned constant (32 bit) from bytecode and advance the pointer.
 *
 * memcpy instead of a direct uint32_t dereference: the bytecode stream
 * is only byte-aligned, so the cast-and-load form is undefined behavior
 * (unaligned access / strict aliasing) on some hosts.
 */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
171 | ||
/* Read signed constant (32 bit) from bytecode and advance the pointer.
 *
 * memcpy instead of a direct int32_t dereference: the bytecode stream
 * is only byte-aligned, so the cast-and-load form is undefined behavior
 * (unaligned access / strict aliasing) on some hosts.
 */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
179 | ||
7657f4bf SW |
180 | #if TCG_TARGET_REG_BITS == 64 |
/* Read constant (64 bit) from bytecode and advance the pointer.
 *
 * memcpy instead of a direct uint64_t dereference: the bytecode stream
 * is only byte-aligned, so the cast-and-load form is undefined behavior
 * (unaligned access / strict aliasing) on some hosts.
 */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
188 | #endif | |
189 | ||
190 | /* Read indexed register (native size) from bytecode. */ | |
191 | static tcg_target_ulong tci_read_r(uint8_t **tb_ptr) | |
192 | { | |
193 | tcg_target_ulong value = tci_read_reg(**tb_ptr); | |
194 | *tb_ptr += 1; | |
195 | return value; | |
196 | } | |
197 | ||
198 | /* Read indexed register (8 bit) from bytecode. */ | |
199 | static uint8_t tci_read_r8(uint8_t **tb_ptr) | |
200 | { | |
201 | uint8_t value = tci_read_reg8(**tb_ptr); | |
202 | *tb_ptr += 1; | |
203 | return value; | |
204 | } | |
205 | ||
206 | #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64 | |
207 | /* Read indexed register (8 bit signed) from bytecode. */ | |
208 | static int8_t tci_read_r8s(uint8_t **tb_ptr) | |
209 | { | |
210 | int8_t value = tci_read_reg8s(**tb_ptr); | |
211 | *tb_ptr += 1; | |
212 | return value; | |
213 | } | |
214 | #endif | |
215 | ||
216 | /* Read indexed register (16 bit) from bytecode. */ | |
217 | static uint16_t tci_read_r16(uint8_t **tb_ptr) | |
218 | { | |
219 | uint16_t value = tci_read_reg16(**tb_ptr); | |
220 | *tb_ptr += 1; | |
221 | return value; | |
222 | } | |
223 | ||
224 | #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 | |
225 | /* Read indexed register (16 bit signed) from bytecode. */ | |
226 | static int16_t tci_read_r16s(uint8_t **tb_ptr) | |
227 | { | |
228 | int16_t value = tci_read_reg16s(**tb_ptr); | |
229 | *tb_ptr += 1; | |
230 | return value; | |
231 | } | |
232 | #endif | |
233 | ||
234 | /* Read indexed register (32 bit) from bytecode. */ | |
235 | static uint32_t tci_read_r32(uint8_t **tb_ptr) | |
236 | { | |
237 | uint32_t value = tci_read_reg32(**tb_ptr); | |
238 | *tb_ptr += 1; | |
239 | return value; | |
240 | } | |
241 | ||
242 | #if TCG_TARGET_REG_BITS == 32 | |
243 | /* Read two indexed registers (2 * 32 bit) from bytecode. */ | |
244 | static uint64_t tci_read_r64(uint8_t **tb_ptr) | |
245 | { | |
246 | uint32_t low = tci_read_r32(tb_ptr); | |
247 | return tci_uint64(tci_read_r32(tb_ptr), low); | |
248 | } | |
249 | #elif TCG_TARGET_REG_BITS == 64 | |
250 | /* Read indexed register (32 bit signed) from bytecode. */ | |
251 | static int32_t tci_read_r32s(uint8_t **tb_ptr) | |
252 | { | |
253 | int32_t value = tci_read_reg32s(**tb_ptr); | |
254 | *tb_ptr += 1; | |
255 | return value; | |
256 | } | |
257 | ||
258 | /* Read indexed register (64 bit) from bytecode. */ | |
259 | static uint64_t tci_read_r64(uint8_t **tb_ptr) | |
260 | { | |
261 | uint64_t value = tci_read_reg64(**tb_ptr); | |
262 | *tb_ptr += 1; | |
263 | return value; | |
264 | } | |
265 | #endif | |
266 | ||
267 | /* Read indexed register(s) with target address from bytecode. */ | |
268 | static target_ulong tci_read_ulong(uint8_t **tb_ptr) | |
269 | { | |
270 | target_ulong taddr = tci_read_r(tb_ptr); | |
271 | #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS | |
272 | taddr += (uint64_t)tci_read_r(tb_ptr) << 32; | |
273 | #endif | |
274 | return taddr; | |
275 | } | |
276 | ||
277 | /* Read indexed register or constant (native size) from bytecode. */ | |
278 | static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr) | |
279 | { | |
280 | tcg_target_ulong value; | |
771142c2 | 281 | TCGReg r = **tb_ptr; |
7657f4bf SW |
282 | *tb_ptr += 1; |
283 | if (r == TCG_CONST) { | |
284 | value = tci_read_i(tb_ptr); | |
285 | } else { | |
286 | value = tci_read_reg(r); | |
287 | } | |
288 | return value; | |
289 | } | |
290 | ||
291 | /* Read indexed register or constant (32 bit) from bytecode. */ | |
292 | static uint32_t tci_read_ri32(uint8_t **tb_ptr) | |
293 | { | |
294 | uint32_t value; | |
771142c2 | 295 | TCGReg r = **tb_ptr; |
7657f4bf SW |
296 | *tb_ptr += 1; |
297 | if (r == TCG_CONST) { | |
298 | value = tci_read_i32(tb_ptr); | |
299 | } else { | |
300 | value = tci_read_reg32(r); | |
301 | } | |
302 | return value; | |
303 | } | |
304 | ||
305 | #if TCG_TARGET_REG_BITS == 32 | |
306 | /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */ | |
307 | static uint64_t tci_read_ri64(uint8_t **tb_ptr) | |
308 | { | |
309 | uint32_t low = tci_read_ri32(tb_ptr); | |
310 | return tci_uint64(tci_read_ri32(tb_ptr), low); | |
311 | } | |
312 | #elif TCG_TARGET_REG_BITS == 64 | |
313 | /* Read indexed register or constant (64 bit) from bytecode. */ | |
314 | static uint64_t tci_read_ri64(uint8_t **tb_ptr) | |
315 | { | |
316 | uint64_t value; | |
771142c2 | 317 | TCGReg r = **tb_ptr; |
7657f4bf SW |
318 | *tb_ptr += 1; |
319 | if (r == TCG_CONST) { | |
320 | value = tci_read_i64(tb_ptr); | |
321 | } else { | |
322 | value = tci_read_reg64(r); | |
323 | } | |
324 | return value; | |
325 | } | |
326 | #endif | |
327 | ||
c6c5063c | 328 | static tcg_target_ulong tci_read_label(uint8_t **tb_ptr) |
7657f4bf | 329 | { |
c6c5063c | 330 | tcg_target_ulong label = tci_read_i(tb_ptr); |
3ccdbecf | 331 | tci_assert(label != 0); |
7657f4bf SW |
332 | return label; |
333 | } | |
334 | ||
335 | static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition) | |
336 | { | |
337 | bool result = false; | |
338 | int32_t i0 = u0; | |
339 | int32_t i1 = u1; | |
340 | switch (condition) { | |
341 | case TCG_COND_EQ: | |
342 | result = (u0 == u1); | |
343 | break; | |
344 | case TCG_COND_NE: | |
345 | result = (u0 != u1); | |
346 | break; | |
347 | case TCG_COND_LT: | |
348 | result = (i0 < i1); | |
349 | break; | |
350 | case TCG_COND_GE: | |
351 | result = (i0 >= i1); | |
352 | break; | |
353 | case TCG_COND_LE: | |
354 | result = (i0 <= i1); | |
355 | break; | |
356 | case TCG_COND_GT: | |
357 | result = (i0 > i1); | |
358 | break; | |
359 | case TCG_COND_LTU: | |
360 | result = (u0 < u1); | |
361 | break; | |
362 | case TCG_COND_GEU: | |
363 | result = (u0 >= u1); | |
364 | break; | |
365 | case TCG_COND_LEU: | |
366 | result = (u0 <= u1); | |
367 | break; | |
368 | case TCG_COND_GTU: | |
369 | result = (u0 > u1); | |
370 | break; | |
371 | default: | |
372 | TODO(); | |
373 | } | |
374 | return result; | |
375 | } | |
376 | ||
377 | static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition) | |
378 | { | |
379 | bool result = false; | |
380 | int64_t i0 = u0; | |
381 | int64_t i1 = u1; | |
382 | switch (condition) { | |
383 | case TCG_COND_EQ: | |
384 | result = (u0 == u1); | |
385 | break; | |
386 | case TCG_COND_NE: | |
387 | result = (u0 != u1); | |
388 | break; | |
389 | case TCG_COND_LT: | |
390 | result = (i0 < i1); | |
391 | break; | |
392 | case TCG_COND_GE: | |
393 | result = (i0 >= i1); | |
394 | break; | |
395 | case TCG_COND_LE: | |
396 | result = (i0 <= i1); | |
397 | break; | |
398 | case TCG_COND_GT: | |
399 | result = (i0 > i1); | |
400 | break; | |
401 | case TCG_COND_LTU: | |
402 | result = (u0 < u1); | |
403 | break; | |
404 | case TCG_COND_GEU: | |
405 | result = (u0 >= u1); | |
406 | break; | |
407 | case TCG_COND_LEU: | |
408 | result = (u0 <= u1); | |
409 | break; | |
410 | case TCG_COND_GTU: | |
411 | result = (u0 > u1); | |
412 | break; | |
413 | default: | |
414 | TODO(); | |
415 | } | |
416 | return result; | |
417 | } | |
418 | ||
76782fab | 419 | #ifdef CONFIG_SOFTMMU |
76782fab | 420 | # define qemu_ld_ub \ |
3972ef6f | 421 | helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 422 | # define qemu_ld_leuw \ |
3972ef6f | 423 | helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 424 | # define qemu_ld_leul \ |
3972ef6f | 425 | helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 426 | # define qemu_ld_leq \ |
3972ef6f | 427 | helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 428 | # define qemu_ld_beuw \ |
3972ef6f | 429 | helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 430 | # define qemu_ld_beul \ |
3972ef6f | 431 | helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 432 | # define qemu_ld_beq \ |
3972ef6f | 433 | helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr) |
76782fab | 434 | # define qemu_st_b(X) \ |
3972ef6f | 435 | helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab | 436 | # define qemu_st_lew(X) \ |
3972ef6f | 437 | helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab | 438 | # define qemu_st_lel(X) \ |
3972ef6f | 439 | helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab | 440 | # define qemu_st_leq(X) \ |
3972ef6f | 441 | helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab | 442 | # define qemu_st_bew(X) \ |
3972ef6f | 443 | helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab | 444 | # define qemu_st_bel(X) \ |
3972ef6f | 445 | helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab | 446 | # define qemu_st_beq(X) \ |
3972ef6f | 447 | helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr) |
76782fab RH |
448 | #else |
449 | # define qemu_ld_ub ldub_p(g2h(taddr)) | |
450 | # define qemu_ld_leuw lduw_le_p(g2h(taddr)) | |
451 | # define qemu_ld_leul (uint32_t)ldl_le_p(g2h(taddr)) | |
452 | # define qemu_ld_leq ldq_le_p(g2h(taddr)) | |
453 | # define qemu_ld_beuw lduw_be_p(g2h(taddr)) | |
454 | # define qemu_ld_beul (uint32_t)ldl_be_p(g2h(taddr)) | |
455 | # define qemu_ld_beq ldq_be_p(g2h(taddr)) | |
456 | # define qemu_st_b(X) stb_p(g2h(taddr), X) | |
457 | # define qemu_st_lew(X) stw_le_p(g2h(taddr), X) | |
458 | # define qemu_st_lel(X) stl_le_p(g2h(taddr), X) | |
459 | # define qemu_st_leq(X) stq_le_p(g2h(taddr), X) | |
460 | # define qemu_st_bew(X) stw_be_p(g2h(taddr), X) | |
461 | # define qemu_st_bel(X) stl_be_p(g2h(taddr), X) | |
462 | # define qemu_st_beq(X) stq_be_p(g2h(taddr), X) | |
463 | #endif | |
464 | ||
7657f4bf | 465 | /* Interpret pseudo code in tb. */ |
04d5a1da | 466 | uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr) |
7657f4bf | 467 | { |
ee79c356 RH |
468 | long tcg_temps[CPU_TEMP_BUF_NLONGS]; |
469 | uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS); | |
04d5a1da | 470 | uintptr_t next_tb = 0; |
7657f4bf | 471 | |
7657f4bf | 472 | tci_reg[TCG_AREG0] = (tcg_target_ulong)env; |
ee79c356 | 473 | tci_reg[TCG_REG_CALL_STACK] = sp_value; |
3ccdbecf | 474 | tci_assert(tb_ptr); |
7657f4bf SW |
475 | |
476 | for (;;) { | |
7657f4bf | 477 | TCGOpcode opc = tb_ptr[0]; |
3ccdbecf | 478 | #if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG) |
7657f4bf SW |
479 | uint8_t op_size = tb_ptr[1]; |
480 | uint8_t *old_code_ptr = tb_ptr; | |
481 | #endif | |
482 | tcg_target_ulong t0; | |
483 | tcg_target_ulong t1; | |
484 | tcg_target_ulong t2; | |
485 | tcg_target_ulong label; | |
486 | TCGCond condition; | |
487 | target_ulong taddr; | |
7657f4bf SW |
488 | uint8_t tmp8; |
489 | uint16_t tmp16; | |
490 | uint32_t tmp32; | |
491 | uint64_t tmp64; | |
492 | #if TCG_TARGET_REG_BITS == 32 | |
493 | uint64_t v64; | |
494 | #endif | |
59227d5d | 495 | TCGMemOpIdx oi; |
7657f4bf | 496 | |
dea8fde8 RH |
497 | #if defined(GETPC) |
498 | tci_tb_ptr = (uintptr_t)tb_ptr; | |
499 | #endif | |
500 | ||
7657f4bf SW |
501 | /* Skip opcode and size entry. */ |
502 | tb_ptr += 2; | |
503 | ||
504 | switch (opc) { | |
7657f4bf SW |
505 | case INDEX_op_call: |
506 | t0 = tci_read_ri(&tb_ptr); | |
507 | #if TCG_TARGET_REG_BITS == 32 | |
508 | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), | |
509 | tci_read_reg(TCG_REG_R1), | |
510 | tci_read_reg(TCG_REG_R2), | |
511 | tci_read_reg(TCG_REG_R3), | |
512 | tci_read_reg(TCG_REG_R5), | |
513 | tci_read_reg(TCG_REG_R6), | |
514 | tci_read_reg(TCG_REG_R7), | |
6673f47d SW |
515 | tci_read_reg(TCG_REG_R8), |
516 | tci_read_reg(TCG_REG_R9), | |
517 | tci_read_reg(TCG_REG_R10)); | |
7657f4bf SW |
518 | tci_write_reg(TCG_REG_R0, tmp64); |
519 | tci_write_reg(TCG_REG_R1, tmp64 >> 32); | |
520 | #else | |
521 | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), | |
522 | tci_read_reg(TCG_REG_R1), | |
523 | tci_read_reg(TCG_REG_R2), | |
6673f47d SW |
524 | tci_read_reg(TCG_REG_R3), |
525 | tci_read_reg(TCG_REG_R5)); | |
7657f4bf SW |
526 | tci_write_reg(TCG_REG_R0, tmp64); |
527 | #endif | |
528 | break; | |
7657f4bf SW |
529 | case INDEX_op_br: |
530 | label = tci_read_label(&tb_ptr); | |
3ccdbecf | 531 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
532 | tb_ptr = (uint8_t *)label; |
533 | continue; | |
534 | case INDEX_op_setcond_i32: | |
535 | t0 = *tb_ptr++; | |
536 | t1 = tci_read_r32(&tb_ptr); | |
537 | t2 = tci_read_ri32(&tb_ptr); | |
538 | condition = *tb_ptr++; | |
539 | tci_write_reg32(t0, tci_compare32(t1, t2, condition)); | |
540 | break; | |
541 | #if TCG_TARGET_REG_BITS == 32 | |
542 | case INDEX_op_setcond2_i32: | |
543 | t0 = *tb_ptr++; | |
544 | tmp64 = tci_read_r64(&tb_ptr); | |
545 | v64 = tci_read_ri64(&tb_ptr); | |
546 | condition = *tb_ptr++; | |
547 | tci_write_reg32(t0, tci_compare64(tmp64, v64, condition)); | |
548 | break; | |
549 | #elif TCG_TARGET_REG_BITS == 64 | |
550 | case INDEX_op_setcond_i64: | |
551 | t0 = *tb_ptr++; | |
552 | t1 = tci_read_r64(&tb_ptr); | |
553 | t2 = tci_read_ri64(&tb_ptr); | |
554 | condition = *tb_ptr++; | |
555 | tci_write_reg64(t0, tci_compare64(t1, t2, condition)); | |
556 | break; | |
557 | #endif | |
558 | case INDEX_op_mov_i32: | |
559 | t0 = *tb_ptr++; | |
560 | t1 = tci_read_r32(&tb_ptr); | |
561 | tci_write_reg32(t0, t1); | |
562 | break; | |
563 | case INDEX_op_movi_i32: | |
564 | t0 = *tb_ptr++; | |
565 | t1 = tci_read_i32(&tb_ptr); | |
566 | tci_write_reg32(t0, t1); | |
567 | break; | |
568 | ||
569 | /* Load/store operations (32 bit). */ | |
570 | ||
571 | case INDEX_op_ld8u_i32: | |
572 | t0 = *tb_ptr++; | |
573 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 574 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
575 | tci_write_reg8(t0, *(uint8_t *)(t1 + t2)); |
576 | break; | |
577 | case INDEX_op_ld8s_i32: | |
578 | case INDEX_op_ld16u_i32: | |
579 | TODO(); | |
580 | break; | |
581 | case INDEX_op_ld16s_i32: | |
582 | TODO(); | |
583 | break; | |
584 | case INDEX_op_ld_i32: | |
585 | t0 = *tb_ptr++; | |
586 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 587 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
588 | tci_write_reg32(t0, *(uint32_t *)(t1 + t2)); |
589 | break; | |
590 | case INDEX_op_st8_i32: | |
591 | t0 = tci_read_r8(&tb_ptr); | |
592 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 593 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
594 | *(uint8_t *)(t1 + t2) = t0; |
595 | break; | |
596 | case INDEX_op_st16_i32: | |
597 | t0 = tci_read_r16(&tb_ptr); | |
598 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 599 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
600 | *(uint16_t *)(t1 + t2) = t0; |
601 | break; | |
602 | case INDEX_op_st_i32: | |
603 | t0 = tci_read_r32(&tb_ptr); | |
604 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 605 | t2 = tci_read_s32(&tb_ptr); |
3ccdbecf | 606 | tci_assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
607 | *(uint32_t *)(t1 + t2) = t0; |
608 | break; | |
609 | ||
610 | /* Arithmetic operations (32 bit). */ | |
611 | ||
612 | case INDEX_op_add_i32: | |
613 | t0 = *tb_ptr++; | |
614 | t1 = tci_read_ri32(&tb_ptr); | |
615 | t2 = tci_read_ri32(&tb_ptr); | |
616 | tci_write_reg32(t0, t1 + t2); | |
617 | break; | |
618 | case INDEX_op_sub_i32: | |
619 | t0 = *tb_ptr++; | |
620 | t1 = tci_read_ri32(&tb_ptr); | |
621 | t2 = tci_read_ri32(&tb_ptr); | |
622 | tci_write_reg32(t0, t1 - t2); | |
623 | break; | |
624 | case INDEX_op_mul_i32: | |
625 | t0 = *tb_ptr++; | |
626 | t1 = tci_read_ri32(&tb_ptr); | |
627 | t2 = tci_read_ri32(&tb_ptr); | |
628 | tci_write_reg32(t0, t1 * t2); | |
629 | break; | |
630 | #if TCG_TARGET_HAS_div_i32 | |
631 | case INDEX_op_div_i32: | |
632 | t0 = *tb_ptr++; | |
633 | t1 = tci_read_ri32(&tb_ptr); | |
634 | t2 = tci_read_ri32(&tb_ptr); | |
635 | tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2); | |
636 | break; | |
637 | case INDEX_op_divu_i32: | |
638 | t0 = *tb_ptr++; | |
639 | t1 = tci_read_ri32(&tb_ptr); | |
640 | t2 = tci_read_ri32(&tb_ptr); | |
641 | tci_write_reg32(t0, t1 / t2); | |
642 | break; | |
643 | case INDEX_op_rem_i32: | |
644 | t0 = *tb_ptr++; | |
645 | t1 = tci_read_ri32(&tb_ptr); | |
646 | t2 = tci_read_ri32(&tb_ptr); | |
647 | tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2); | |
648 | break; | |
649 | case INDEX_op_remu_i32: | |
650 | t0 = *tb_ptr++; | |
651 | t1 = tci_read_ri32(&tb_ptr); | |
652 | t2 = tci_read_ri32(&tb_ptr); | |
653 | tci_write_reg32(t0, t1 % t2); | |
654 | break; | |
655 | #elif TCG_TARGET_HAS_div2_i32 | |
656 | case INDEX_op_div2_i32: | |
657 | case INDEX_op_divu2_i32: | |
658 | TODO(); | |
659 | break; | |
660 | #endif | |
661 | case INDEX_op_and_i32: | |
662 | t0 = *tb_ptr++; | |
663 | t1 = tci_read_ri32(&tb_ptr); | |
664 | t2 = tci_read_ri32(&tb_ptr); | |
665 | tci_write_reg32(t0, t1 & t2); | |
666 | break; | |
667 | case INDEX_op_or_i32: | |
668 | t0 = *tb_ptr++; | |
669 | t1 = tci_read_ri32(&tb_ptr); | |
670 | t2 = tci_read_ri32(&tb_ptr); | |
671 | tci_write_reg32(t0, t1 | t2); | |
672 | break; | |
673 | case INDEX_op_xor_i32: | |
674 | t0 = *tb_ptr++; | |
675 | t1 = tci_read_ri32(&tb_ptr); | |
676 | t2 = tci_read_ri32(&tb_ptr); | |
677 | tci_write_reg32(t0, t1 ^ t2); | |
678 | break; | |
679 | ||
680 | /* Shift/rotate operations (32 bit). */ | |
681 | ||
682 | case INDEX_op_shl_i32: | |
683 | t0 = *tb_ptr++; | |
684 | t1 = tci_read_ri32(&tb_ptr); | |
685 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 686 | tci_write_reg32(t0, t1 << (t2 & 31)); |
7657f4bf SW |
687 | break; |
688 | case INDEX_op_shr_i32: | |
689 | t0 = *tb_ptr++; | |
690 | t1 = tci_read_ri32(&tb_ptr); | |
691 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 692 | tci_write_reg32(t0, t1 >> (t2 & 31)); |
7657f4bf SW |
693 | break; |
694 | case INDEX_op_sar_i32: | |
695 | t0 = *tb_ptr++; | |
696 | t1 = tci_read_ri32(&tb_ptr); | |
697 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 698 | tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31))); |
7657f4bf SW |
699 | break; |
700 | #if TCG_TARGET_HAS_rot_i32 | |
701 | case INDEX_op_rotl_i32: | |
702 | t0 = *tb_ptr++; | |
703 | t1 = tci_read_ri32(&tb_ptr); | |
704 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 705 | tci_write_reg32(t0, rol32(t1, t2 & 31)); |
7657f4bf SW |
706 | break; |
707 | case INDEX_op_rotr_i32: | |
708 | t0 = *tb_ptr++; | |
709 | t1 = tci_read_ri32(&tb_ptr); | |
710 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 711 | tci_write_reg32(t0, ror32(t1, t2 & 31)); |
7657f4bf | 712 | break; |
e24dc9fe SW |
713 | #endif |
714 | #if TCG_TARGET_HAS_deposit_i32 | |
715 | case INDEX_op_deposit_i32: | |
716 | t0 = *tb_ptr++; | |
717 | t1 = tci_read_r32(&tb_ptr); | |
718 | t2 = tci_read_r32(&tb_ptr); | |
719 | tmp16 = *tb_ptr++; | |
720 | tmp8 = *tb_ptr++; | |
721 | tmp32 = (((1 << tmp8) - 1) << tmp16); | |
722 | tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32)); | |
723 | break; | |
7657f4bf SW |
724 | #endif |
725 | case INDEX_op_brcond_i32: | |
726 | t0 = tci_read_r32(&tb_ptr); | |
727 | t1 = tci_read_ri32(&tb_ptr); | |
728 | condition = *tb_ptr++; | |
729 | label = tci_read_label(&tb_ptr); | |
730 | if (tci_compare32(t0, t1, condition)) { | |
3ccdbecf | 731 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
732 | tb_ptr = (uint8_t *)label; |
733 | continue; | |
734 | } | |
735 | break; | |
736 | #if TCG_TARGET_REG_BITS == 32 | |
737 | case INDEX_op_add2_i32: | |
738 | t0 = *tb_ptr++; | |
739 | t1 = *tb_ptr++; | |
740 | tmp64 = tci_read_r64(&tb_ptr); | |
741 | tmp64 += tci_read_r64(&tb_ptr); | |
742 | tci_write_reg64(t1, t0, tmp64); | |
743 | break; | |
744 | case INDEX_op_sub2_i32: | |
745 | t0 = *tb_ptr++; | |
746 | t1 = *tb_ptr++; | |
747 | tmp64 = tci_read_r64(&tb_ptr); | |
748 | tmp64 -= tci_read_r64(&tb_ptr); | |
749 | tci_write_reg64(t1, t0, tmp64); | |
750 | break; | |
751 | case INDEX_op_brcond2_i32: | |
752 | tmp64 = tci_read_r64(&tb_ptr); | |
753 | v64 = tci_read_ri64(&tb_ptr); | |
754 | condition = *tb_ptr++; | |
755 | label = tci_read_label(&tb_ptr); | |
756 | if (tci_compare64(tmp64, v64, condition)) { | |
3ccdbecf | 757 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
758 | tb_ptr = (uint8_t *)label; |
759 | continue; | |
760 | } | |
761 | break; | |
762 | case INDEX_op_mulu2_i32: | |
763 | t0 = *tb_ptr++; | |
764 | t1 = *tb_ptr++; | |
765 | t2 = tci_read_r32(&tb_ptr); | |
766 | tmp64 = tci_read_r32(&tb_ptr); | |
767 | tci_write_reg64(t1, t0, t2 * tmp64); | |
768 | break; | |
769 | #endif /* TCG_TARGET_REG_BITS == 32 */ | |
770 | #if TCG_TARGET_HAS_ext8s_i32 | |
771 | case INDEX_op_ext8s_i32: | |
772 | t0 = *tb_ptr++; | |
773 | t1 = tci_read_r8s(&tb_ptr); | |
774 | tci_write_reg32(t0, t1); | |
775 | break; | |
776 | #endif | |
777 | #if TCG_TARGET_HAS_ext16s_i32 | |
778 | case INDEX_op_ext16s_i32: | |
779 | t0 = *tb_ptr++; | |
780 | t1 = tci_read_r16s(&tb_ptr); | |
781 | tci_write_reg32(t0, t1); | |
782 | break; | |
783 | #endif | |
784 | #if TCG_TARGET_HAS_ext8u_i32 | |
785 | case INDEX_op_ext8u_i32: | |
786 | t0 = *tb_ptr++; | |
787 | t1 = tci_read_r8(&tb_ptr); | |
788 | tci_write_reg32(t0, t1); | |
789 | break; | |
790 | #endif | |
791 | #if TCG_TARGET_HAS_ext16u_i32 | |
792 | case INDEX_op_ext16u_i32: | |
793 | t0 = *tb_ptr++; | |
794 | t1 = tci_read_r16(&tb_ptr); | |
795 | tci_write_reg32(t0, t1); | |
796 | break; | |
797 | #endif | |
798 | #if TCG_TARGET_HAS_bswap16_i32 | |
799 | case INDEX_op_bswap16_i32: | |
800 | t0 = *tb_ptr++; | |
801 | t1 = tci_read_r16(&tb_ptr); | |
802 | tci_write_reg32(t0, bswap16(t1)); | |
803 | break; | |
804 | #endif | |
805 | #if TCG_TARGET_HAS_bswap32_i32 | |
806 | case INDEX_op_bswap32_i32: | |
807 | t0 = *tb_ptr++; | |
808 | t1 = tci_read_r32(&tb_ptr); | |
809 | tci_write_reg32(t0, bswap32(t1)); | |
810 | break; | |
811 | #endif | |
812 | #if TCG_TARGET_HAS_not_i32 | |
813 | case INDEX_op_not_i32: | |
814 | t0 = *tb_ptr++; | |
815 | t1 = tci_read_r32(&tb_ptr); | |
816 | tci_write_reg32(t0, ~t1); | |
817 | break; | |
818 | #endif | |
819 | #if TCG_TARGET_HAS_neg_i32 | |
820 | case INDEX_op_neg_i32: | |
821 | t0 = *tb_ptr++; | |
822 | t1 = tci_read_r32(&tb_ptr); | |
823 | tci_write_reg32(t0, -t1); | |
824 | break; | |
825 | #endif | |
826 | #if TCG_TARGET_REG_BITS == 64 | |
827 | case INDEX_op_mov_i64: | |
828 | t0 = *tb_ptr++; | |
829 | t1 = tci_read_r64(&tb_ptr); | |
830 | tci_write_reg64(t0, t1); | |
831 | break; | |
832 | case INDEX_op_movi_i64: | |
833 | t0 = *tb_ptr++; | |
834 | t1 = tci_read_i64(&tb_ptr); | |
835 | tci_write_reg64(t0, t1); | |
836 | break; | |
837 | ||
838 | /* Load/store operations (64 bit). */ | |
839 | ||
840 | case INDEX_op_ld8u_i64: | |
841 | t0 = *tb_ptr++; | |
842 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 843 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
844 | tci_write_reg8(t0, *(uint8_t *)(t1 + t2)); |
845 | break; | |
846 | case INDEX_op_ld8s_i64: | |
847 | case INDEX_op_ld16u_i64: | |
848 | case INDEX_op_ld16s_i64: | |
849 | TODO(); | |
850 | break; | |
851 | case INDEX_op_ld32u_i64: | |
852 | t0 = *tb_ptr++; | |
853 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 854 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
855 | tci_write_reg32(t0, *(uint32_t *)(t1 + t2)); |
856 | break; | |
857 | case INDEX_op_ld32s_i64: | |
858 | t0 = *tb_ptr++; | |
859 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 860 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
861 | tci_write_reg32s(t0, *(int32_t *)(t1 + t2)); |
862 | break; | |
863 | case INDEX_op_ld_i64: | |
864 | t0 = *tb_ptr++; | |
865 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 866 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
867 | tci_write_reg64(t0, *(uint64_t *)(t1 + t2)); |
868 | break; | |
869 | case INDEX_op_st8_i64: | |
870 | t0 = tci_read_r8(&tb_ptr); | |
871 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 872 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
873 | *(uint8_t *)(t1 + t2) = t0; |
874 | break; | |
875 | case INDEX_op_st16_i64: | |
876 | t0 = tci_read_r16(&tb_ptr); | |
877 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 878 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
879 | *(uint16_t *)(t1 + t2) = t0; |
880 | break; | |
881 | case INDEX_op_st32_i64: | |
882 | t0 = tci_read_r32(&tb_ptr); | |
883 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 884 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
885 | *(uint32_t *)(t1 + t2) = t0; |
886 | break; | |
887 | case INDEX_op_st_i64: | |
888 | t0 = tci_read_r64(&tb_ptr); | |
889 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 890 | t2 = tci_read_s32(&tb_ptr); |
3ccdbecf | 891 | tci_assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
892 | *(uint64_t *)(t1 + t2) = t0; |
893 | break; | |
894 | ||
895 | /* Arithmetic operations (64 bit). */ | |
896 | ||
897 | case INDEX_op_add_i64: | |
898 | t0 = *tb_ptr++; | |
899 | t1 = tci_read_ri64(&tb_ptr); | |
900 | t2 = tci_read_ri64(&tb_ptr); | |
901 | tci_write_reg64(t0, t1 + t2); | |
902 | break; | |
903 | case INDEX_op_sub_i64: | |
904 | t0 = *tb_ptr++; | |
905 | t1 = tci_read_ri64(&tb_ptr); | |
906 | t2 = tci_read_ri64(&tb_ptr); | |
907 | tci_write_reg64(t0, t1 - t2); | |
908 | break; | |
909 | case INDEX_op_mul_i64: | |
910 | t0 = *tb_ptr++; | |
911 | t1 = tci_read_ri64(&tb_ptr); | |
912 | t2 = tci_read_ri64(&tb_ptr); | |
913 | tci_write_reg64(t0, t1 * t2); | |
914 | break; | |
915 | #if TCG_TARGET_HAS_div_i64 | |
916 | case INDEX_op_div_i64: | |
917 | case INDEX_op_divu_i64: | |
918 | case INDEX_op_rem_i64: | |
919 | case INDEX_op_remu_i64: | |
920 | TODO(); | |
921 | break; | |
922 | #elif TCG_TARGET_HAS_div2_i64 | |
923 | case INDEX_op_div2_i64: | |
924 | case INDEX_op_divu2_i64: | |
925 | TODO(); | |
926 | break; | |
927 | #endif | |
928 | case INDEX_op_and_i64: | |
929 | t0 = *tb_ptr++; | |
930 | t1 = tci_read_ri64(&tb_ptr); | |
931 | t2 = tci_read_ri64(&tb_ptr); | |
932 | tci_write_reg64(t0, t1 & t2); | |
933 | break; | |
934 | case INDEX_op_or_i64: | |
935 | t0 = *tb_ptr++; | |
936 | t1 = tci_read_ri64(&tb_ptr); | |
937 | t2 = tci_read_ri64(&tb_ptr); | |
938 | tci_write_reg64(t0, t1 | t2); | |
939 | break; | |
940 | case INDEX_op_xor_i64: | |
941 | t0 = *tb_ptr++; | |
942 | t1 = tci_read_ri64(&tb_ptr); | |
943 | t2 = tci_read_ri64(&tb_ptr); | |
944 | tci_write_reg64(t0, t1 ^ t2); | |
945 | break; | |
946 | ||
947 | /* Shift/rotate operations (64 bit). */ | |
948 | ||
949 | case INDEX_op_shl_i64: | |
950 | t0 = *tb_ptr++; | |
951 | t1 = tci_read_ri64(&tb_ptr); | |
952 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 953 | tci_write_reg64(t0, t1 << (t2 & 63)); |
7657f4bf SW |
954 | break; |
955 | case INDEX_op_shr_i64: | |
956 | t0 = *tb_ptr++; | |
957 | t1 = tci_read_ri64(&tb_ptr); | |
958 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 959 | tci_write_reg64(t0, t1 >> (t2 & 63)); |
7657f4bf SW |
960 | break; |
961 | case INDEX_op_sar_i64: | |
962 | t0 = *tb_ptr++; | |
963 | t1 = tci_read_ri64(&tb_ptr); | |
964 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 965 | tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63))); |
7657f4bf SW |
966 | break; |
967 | #if TCG_TARGET_HAS_rot_i64 | |
968 | case INDEX_op_rotl_i64: | |
d285bf78 SW |
969 | t0 = *tb_ptr++; |
970 | t1 = tci_read_ri64(&tb_ptr); | |
971 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 972 | tci_write_reg64(t0, rol64(t1, t2 & 63)); |
d285bf78 | 973 | break; |
7657f4bf | 974 | case INDEX_op_rotr_i64: |
d285bf78 SW |
975 | t0 = *tb_ptr++; |
976 | t1 = tci_read_ri64(&tb_ptr); | |
977 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 978 | tci_write_reg64(t0, ror64(t1, t2 & 63)); |
7657f4bf | 979 | break; |
e24dc9fe SW |
980 | #endif |
981 | #if TCG_TARGET_HAS_deposit_i64 | |
982 | case INDEX_op_deposit_i64: | |
983 | t0 = *tb_ptr++; | |
984 | t1 = tci_read_r64(&tb_ptr); | |
985 | t2 = tci_read_r64(&tb_ptr); | |
986 | tmp16 = *tb_ptr++; | |
987 | tmp8 = *tb_ptr++; | |
988 | tmp64 = (((1ULL << tmp8) - 1) << tmp16); | |
989 | tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64)); | |
990 | break; | |
7657f4bf SW |
991 | #endif |
992 | case INDEX_op_brcond_i64: | |
993 | t0 = tci_read_r64(&tb_ptr); | |
994 | t1 = tci_read_ri64(&tb_ptr); | |
995 | condition = *tb_ptr++; | |
996 | label = tci_read_label(&tb_ptr); | |
997 | if (tci_compare64(t0, t1, condition)) { | |
3ccdbecf | 998 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
999 | tb_ptr = (uint8_t *)label; |
1000 | continue; | |
1001 | } | |
1002 | break; | |
1003 | #if TCG_TARGET_HAS_ext8u_i64 | |
1004 | case INDEX_op_ext8u_i64: | |
1005 | t0 = *tb_ptr++; | |
1006 | t1 = tci_read_r8(&tb_ptr); | |
1007 | tci_write_reg64(t0, t1); | |
1008 | break; | |
1009 | #endif | |
1010 | #if TCG_TARGET_HAS_ext8s_i64 | |
1011 | case INDEX_op_ext8s_i64: | |
1012 | t0 = *tb_ptr++; | |
1013 | t1 = tci_read_r8s(&tb_ptr); | |
1014 | tci_write_reg64(t0, t1); | |
1015 | break; | |
1016 | #endif | |
1017 | #if TCG_TARGET_HAS_ext16s_i64 | |
1018 | case INDEX_op_ext16s_i64: | |
1019 | t0 = *tb_ptr++; | |
1020 | t1 = tci_read_r16s(&tb_ptr); | |
1021 | tci_write_reg64(t0, t1); | |
1022 | break; | |
1023 | #endif | |
1024 | #if TCG_TARGET_HAS_ext16u_i64 | |
1025 | case INDEX_op_ext16u_i64: | |
1026 | t0 = *tb_ptr++; | |
1027 | t1 = tci_read_r16(&tb_ptr); | |
1028 | tci_write_reg64(t0, t1); | |
1029 | break; | |
1030 | #endif | |
1031 | #if TCG_TARGET_HAS_ext32s_i64 | |
1032 | case INDEX_op_ext32s_i64: | |
4f2331e5 AJ |
1033 | #endif |
1034 | case INDEX_op_ext_i32_i64: | |
7657f4bf SW |
1035 | t0 = *tb_ptr++; |
1036 | t1 = tci_read_r32s(&tb_ptr); | |
1037 | tci_write_reg64(t0, t1); | |
1038 | break; | |
7657f4bf SW |
1039 | #if TCG_TARGET_HAS_ext32u_i64 |
1040 | case INDEX_op_ext32u_i64: | |
4f2331e5 AJ |
1041 | #endif |
1042 | case INDEX_op_extu_i32_i64: | |
7657f4bf SW |
1043 | t0 = *tb_ptr++; |
1044 | t1 = tci_read_r32(&tb_ptr); | |
1045 | tci_write_reg64(t0, t1); | |
1046 | break; | |
7657f4bf SW |
1047 | #if TCG_TARGET_HAS_bswap16_i64 |
1048 | case INDEX_op_bswap16_i64: | |
1049 | TODO(); | |
1050 | t0 = *tb_ptr++; | |
1051 | t1 = tci_read_r16(&tb_ptr); | |
1052 | tci_write_reg64(t0, bswap16(t1)); | |
1053 | break; | |
1054 | #endif | |
1055 | #if TCG_TARGET_HAS_bswap32_i64 | |
1056 | case INDEX_op_bswap32_i64: | |
1057 | t0 = *tb_ptr++; | |
1058 | t1 = tci_read_r32(&tb_ptr); | |
1059 | tci_write_reg64(t0, bswap32(t1)); | |
1060 | break; | |
1061 | #endif | |
1062 | #if TCG_TARGET_HAS_bswap64_i64 | |
1063 | case INDEX_op_bswap64_i64: | |
7657f4bf SW |
1064 | t0 = *tb_ptr++; |
1065 | t1 = tci_read_r64(&tb_ptr); | |
1066 | tci_write_reg64(t0, bswap64(t1)); | |
1067 | break; | |
1068 | #endif | |
1069 | #if TCG_TARGET_HAS_not_i64 | |
1070 | case INDEX_op_not_i64: | |
1071 | t0 = *tb_ptr++; | |
1072 | t1 = tci_read_r64(&tb_ptr); | |
1073 | tci_write_reg64(t0, ~t1); | |
1074 | break; | |
1075 | #endif | |
1076 | #if TCG_TARGET_HAS_neg_i64 | |
1077 | case INDEX_op_neg_i64: | |
1078 | t0 = *tb_ptr++; | |
1079 | t1 = tci_read_r64(&tb_ptr); | |
1080 | tci_write_reg64(t0, -t1); | |
1081 | break; | |
1082 | #endif | |
1083 | #endif /* TCG_TARGET_REG_BITS == 64 */ | |
1084 | ||
1085 | /* QEMU specific operations. */ | |
1086 | ||
7657f4bf SW |
1087 | case INDEX_op_exit_tb: |
1088 | next_tb = *(uint64_t *)tb_ptr; | |
1089 | goto exit; | |
1090 | break; | |
1091 | case INDEX_op_goto_tb: | |
1092 | t0 = tci_read_i32(&tb_ptr); | |
3ccdbecf | 1093 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
1094 | tb_ptr += (int32_t)t0; |
1095 | continue; | |
76782fab | 1096 | case INDEX_op_qemu_ld_i32: |
7657f4bf SW |
1097 | t0 = *tb_ptr++; |
1098 | taddr = tci_read_ulong(&tb_ptr); | |
59227d5d | 1099 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1100 | switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) { |
76782fab RH |
1101 | case MO_UB: |
1102 | tmp32 = qemu_ld_ub; | |
1103 | break; | |
1104 | case MO_SB: | |
1105 | tmp32 = (int8_t)qemu_ld_ub; | |
1106 | break; | |
1107 | case MO_LEUW: | |
1108 | tmp32 = qemu_ld_leuw; | |
1109 | break; | |
1110 | case MO_LESW: | |
1111 | tmp32 = (int16_t)qemu_ld_leuw; | |
1112 | break; | |
1113 | case MO_LEUL: | |
1114 | tmp32 = qemu_ld_leul; | |
1115 | break; | |
1116 | case MO_BEUW: | |
1117 | tmp32 = qemu_ld_beuw; | |
1118 | break; | |
1119 | case MO_BESW: | |
1120 | tmp32 = (int16_t)qemu_ld_beuw; | |
1121 | break; | |
1122 | case MO_BEUL: | |
1123 | tmp32 = qemu_ld_beul; | |
1124 | break; | |
1125 | default: | |
1126 | tcg_abort(); | |
1127 | } | |
1128 | tci_write_reg(t0, tmp32); | |
7657f4bf | 1129 | break; |
76782fab | 1130 | case INDEX_op_qemu_ld_i64: |
7657f4bf | 1131 | t0 = *tb_ptr++; |
76782fab RH |
1132 | if (TCG_TARGET_REG_BITS == 32) { |
1133 | t1 = *tb_ptr++; | |
1134 | } | |
7657f4bf | 1135 | taddr = tci_read_ulong(&tb_ptr); |
59227d5d | 1136 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1137 | switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) { |
76782fab RH |
1138 | case MO_UB: |
1139 | tmp64 = qemu_ld_ub; | |
1140 | break; | |
1141 | case MO_SB: | |
1142 | tmp64 = (int8_t)qemu_ld_ub; | |
1143 | break; | |
1144 | case MO_LEUW: | |
1145 | tmp64 = qemu_ld_leuw; | |
1146 | break; | |
1147 | case MO_LESW: | |
1148 | tmp64 = (int16_t)qemu_ld_leuw; | |
1149 | break; | |
1150 | case MO_LEUL: | |
1151 | tmp64 = qemu_ld_leul; | |
1152 | break; | |
1153 | case MO_LESL: | |
1154 | tmp64 = (int32_t)qemu_ld_leul; | |
1155 | break; | |
1156 | case MO_LEQ: | |
1157 | tmp64 = qemu_ld_leq; | |
1158 | break; | |
1159 | case MO_BEUW: | |
1160 | tmp64 = qemu_ld_beuw; | |
1161 | break; | |
1162 | case MO_BESW: | |
1163 | tmp64 = (int16_t)qemu_ld_beuw; | |
1164 | break; | |
1165 | case MO_BEUL: | |
1166 | tmp64 = qemu_ld_beul; | |
1167 | break; | |
1168 | case MO_BESL: | |
1169 | tmp64 = (int32_t)qemu_ld_beul; | |
1170 | break; | |
1171 | case MO_BEQ: | |
1172 | tmp64 = qemu_ld_beq; | |
1173 | break; | |
1174 | default: | |
1175 | tcg_abort(); | |
1176 | } | |
7657f4bf | 1177 | tci_write_reg(t0, tmp64); |
76782fab RH |
1178 | if (TCG_TARGET_REG_BITS == 32) { |
1179 | tci_write_reg(t1, tmp64 >> 32); | |
1180 | } | |
7657f4bf | 1181 | break; |
76782fab RH |
1182 | case INDEX_op_qemu_st_i32: |
1183 | t0 = tci_read_r(&tb_ptr); | |
7657f4bf | 1184 | taddr = tci_read_ulong(&tb_ptr); |
59227d5d | 1185 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1186 | switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) { |
76782fab RH |
1187 | case MO_UB: |
1188 | qemu_st_b(t0); | |
1189 | break; | |
1190 | case MO_LEUW: | |
1191 | qemu_st_lew(t0); | |
1192 | break; | |
1193 | case MO_LEUL: | |
1194 | qemu_st_lel(t0); | |
1195 | break; | |
1196 | case MO_BEUW: | |
1197 | qemu_st_bew(t0); | |
1198 | break; | |
1199 | case MO_BEUL: | |
1200 | qemu_st_bel(t0); | |
1201 | break; | |
1202 | default: | |
1203 | tcg_abort(); | |
1204 | } | |
7657f4bf | 1205 | break; |
76782fab | 1206 | case INDEX_op_qemu_st_i64: |
7657f4bf SW |
1207 | tmp64 = tci_read_r64(&tb_ptr); |
1208 | taddr = tci_read_ulong(&tb_ptr); | |
59227d5d | 1209 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1210 | switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) { |
76782fab RH |
1211 | case MO_UB: |
1212 | qemu_st_b(tmp64); | |
1213 | break; | |
1214 | case MO_LEUW: | |
1215 | qemu_st_lew(tmp64); | |
1216 | break; | |
1217 | case MO_LEUL: | |
1218 | qemu_st_lel(tmp64); | |
1219 | break; | |
1220 | case MO_LEQ: | |
1221 | qemu_st_leq(tmp64); | |
1222 | break; | |
1223 | case MO_BEUW: | |
1224 | qemu_st_bew(tmp64); | |
1225 | break; | |
1226 | case MO_BEUL: | |
1227 | qemu_st_bel(tmp64); | |
1228 | break; | |
1229 | case MO_BEQ: | |
1230 | qemu_st_beq(tmp64); | |
1231 | break; | |
1232 | default: | |
1233 | tcg_abort(); | |
1234 | } | |
7657f4bf SW |
1235 | break; |
1236 | default: | |
1237 | TODO(); | |
1238 | break; | |
1239 | } | |
3ccdbecf | 1240 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
1241 | } |
1242 | exit: | |
1243 | return next_tb; | |
1244 | } |