]>
Commit | Line | Data |
---|---|---|
7657f4bf SW |
1 | /* |
2 | * Tiny Code Interpreter for QEMU | |
3 | * | |
4 | * Copyright (c) 2009, 2011 Stefan Weil | |
5 | * | |
6 | * This program is free software: you can redistribute it and/or modify | |
7 | * it under the terms of the GNU General Public License as published by | |
8 | * the Free Software Foundation, either version 2 of the License, or | |
9 | * (at your option) any later version. | |
10 | * | |
11 | * This program is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | * GNU General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU General Public License | |
17 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
18 | */ | |
19 | ||
20 | #include "config.h" | |
21 | ||
22 | /* Defining NDEBUG disables assertions (which makes the code faster). */ | |
17904bcf | 23 | #if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG) |
7657f4bf SW |
24 | # define NDEBUG |
25 | #endif | |
26 | ||
27 | #include "qemu-common.h" | |
022c62cb | 28 | #include "exec/exec-all.h" /* MAX_OPC_PARAM_IARGS */ |
f08b6170 | 29 | #include "exec/cpu_ldst.h" |
7657f4bf SW |
30 | #include "tcg-op.h" |
31 | ||
32 | /* Marker for missing code. */ | |
33 | #define TODO() \ | |
34 | do { \ | |
35 | fprintf(stderr, "TODO %s:%u: %s()\n", \ | |
36 | __FILE__, __LINE__, __func__); \ | |
37 | tcg_abort(); \ | |
38 | } while (0) | |
39 | ||
6673f47d | 40 | #if MAX_OPC_PARAM_IARGS != 5 |
7657f4bf SW |
41 | # error Fix needed, number of supported input arguments changed! |
42 | #endif | |
43 | #if TCG_TARGET_REG_BITS == 32 | |
44 | typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong, | |
6673f47d | 45 | tcg_target_ulong, tcg_target_ulong, |
7657f4bf SW |
46 | tcg_target_ulong, tcg_target_ulong, |
47 | tcg_target_ulong, tcg_target_ulong, | |
48 | tcg_target_ulong, tcg_target_ulong); | |
49 | #else | |
50 | typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong, | |
6673f47d SW |
51 | tcg_target_ulong, tcg_target_ulong, |
52 | tcg_target_ulong); | |
7657f4bf SW |
53 | #endif |
54 | ||
7657f4bf SW |
55 | /* Targets which don't use GETPC also don't need tci_tb_ptr |
56 | which makes them a little faster. */ | |
57 | #if defined(GETPC) | |
c3ca0467 | 58 | uintptr_t tci_tb_ptr; |
7657f4bf SW |
59 | #endif |
60 | ||
61 | static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS]; | |
62 | ||
771142c2 | 63 | static tcg_target_ulong tci_read_reg(TCGReg index) |
7657f4bf SW |
64 | { |
65 | assert(index < ARRAY_SIZE(tci_reg)); | |
66 | return tci_reg[index]; | |
67 | } | |
68 | ||
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read a register and return its value sign-extended from 8 bits. */
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read a register and return its value sign-extended from 16 bits. */
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
/* Read a register and return its value sign-extended from 32 bits. */
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

/* Read a register and return its low 8 bits (zero-extended). */
static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

/* Read a register and return its low 16 bits (zero-extended). */
static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

/* Read a register and return its low 32 bits (zero-extended). */
static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
/* Read a register and return its full 64 bit value. */
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif
111 | ||
/* Store VALUE into the indexed interpreter register.  Writes to the
   env pointer register (TCG_AREG0) and to the stack pointer register
   (TCG_REG_CALL_STACK) are rejected by assertions: generated bytecode
   must never modify them. */
static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
/* Write a 32 bit value, sign-extended to 64 bits, to a register. */
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

/* Write an 8 bit value (zero-extended) to a register. */
static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

/* Write a 32 bit value (zero-extended on 64 bit hosts) to a register. */
static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
/* Write a 64 bit value to a pair of 32 bit registers:
   low half to LOW_INDEX, high half to HIGH_INDEX. */
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
/* Write a 64 bit value to a 64 bit register. */
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif
150 | ||
151 | #if TCG_TARGET_REG_BITS == 32 | |
/* Combine a high and a low 32 bit half into one 64 bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) | low;
}
157 | #endif | |
158 | ||
159 | /* Read constant (native size) from bytecode. */ | |
160 | static tcg_target_ulong tci_read_i(uint8_t **tb_ptr) | |
161 | { | |
162 | tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr); | |
163 | *tb_ptr += sizeof(value); | |
164 | return value; | |
165 | } | |
166 | ||
/* Read unsigned constant (32 bit) from bytecode and advance the stream. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    uint32_t value = *(uint32_t *)p;

    *tb_ptr = p + sizeof(value);
    return value;
}
174 | ||
/* Read signed constant (32 bit) from bytecode and advance the stream. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    int32_t value = *(int32_t *)p;

    *tb_ptr = p + sizeof(value);
    return value;
}
182 | ||
7657f4bf SW |
183 | #if TCG_TARGET_REG_BITS == 64 |
/* Read constant (64 bit) from bytecode and advance the stream. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    uint64_t value = *(uint64_t *)p;

    *tb_ptr = p + sizeof(value);
    return value;
}
191 | #endif | |
192 | ||
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode.
   The low half is encoded first, then the high half. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
269 | ||
/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    /* A guest address wider than a host register is split across two
       register operands: low half first, then high half. */
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
279 | ||
280 | /* Read indexed register or constant (native size) from bytecode. */ | |
281 | static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr) | |
282 | { | |
283 | tcg_target_ulong value; | |
771142c2 | 284 | TCGReg r = **tb_ptr; |
7657f4bf SW |
285 | *tb_ptr += 1; |
286 | if (r == TCG_CONST) { | |
287 | value = tci_read_i(tb_ptr); | |
288 | } else { | |
289 | value = tci_read_reg(r); | |
290 | } | |
291 | return value; | |
292 | } | |
293 | ||
294 | /* Read indexed register or constant (32 bit) from bytecode. */ | |
295 | static uint32_t tci_read_ri32(uint8_t **tb_ptr) | |
296 | { | |
297 | uint32_t value; | |
771142c2 | 298 | TCGReg r = **tb_ptr; |
7657f4bf SW |
299 | *tb_ptr += 1; |
300 | if (r == TCG_CONST) { | |
301 | value = tci_read_i32(tb_ptr); | |
302 | } else { | |
303 | value = tci_read_reg32(r); | |
304 | } | |
305 | return value; | |
306 | } | |
307 | ||
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode.
   The low half is encoded first, then the high half. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    /* TCG_CONST flags an inline constant instead of a register index. */
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif
330 | ||
c6c5063c | 331 | static tcg_target_ulong tci_read_label(uint8_t **tb_ptr) |
7657f4bf | 332 | { |
c6c5063c | 333 | tcg_target_ulong label = tci_read_i(tb_ptr); |
7657f4bf SW |
334 | assert(label != 0); |
335 | return label; | |
336 | } | |
337 | ||
338 | static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition) | |
339 | { | |
340 | bool result = false; | |
341 | int32_t i0 = u0; | |
342 | int32_t i1 = u1; | |
343 | switch (condition) { | |
344 | case TCG_COND_EQ: | |
345 | result = (u0 == u1); | |
346 | break; | |
347 | case TCG_COND_NE: | |
348 | result = (u0 != u1); | |
349 | break; | |
350 | case TCG_COND_LT: | |
351 | result = (i0 < i1); | |
352 | break; | |
353 | case TCG_COND_GE: | |
354 | result = (i0 >= i1); | |
355 | break; | |
356 | case TCG_COND_LE: | |
357 | result = (i0 <= i1); | |
358 | break; | |
359 | case TCG_COND_GT: | |
360 | result = (i0 > i1); | |
361 | break; | |
362 | case TCG_COND_LTU: | |
363 | result = (u0 < u1); | |
364 | break; | |
365 | case TCG_COND_GEU: | |
366 | result = (u0 >= u1); | |
367 | break; | |
368 | case TCG_COND_LEU: | |
369 | result = (u0 <= u1); | |
370 | break; | |
371 | case TCG_COND_GTU: | |
372 | result = (u0 > u1); | |
373 | break; | |
374 | default: | |
375 | TODO(); | |
376 | } | |
377 | return result; | |
378 | } | |
379 | ||
380 | static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition) | |
381 | { | |
382 | bool result = false; | |
383 | int64_t i0 = u0; | |
384 | int64_t i1 = u1; | |
385 | switch (condition) { | |
386 | case TCG_COND_EQ: | |
387 | result = (u0 == u1); | |
388 | break; | |
389 | case TCG_COND_NE: | |
390 | result = (u0 != u1); | |
391 | break; | |
392 | case TCG_COND_LT: | |
393 | result = (i0 < i1); | |
394 | break; | |
395 | case TCG_COND_GE: | |
396 | result = (i0 >= i1); | |
397 | break; | |
398 | case TCG_COND_LE: | |
399 | result = (i0 <= i1); | |
400 | break; | |
401 | case TCG_COND_GT: | |
402 | result = (i0 > i1); | |
403 | break; | |
404 | case TCG_COND_LTU: | |
405 | result = (u0 < u1); | |
406 | break; | |
407 | case TCG_COND_GEU: | |
408 | result = (u0 >= u1); | |
409 | break; | |
410 | case TCG_COND_LEU: | |
411 | result = (u0 <= u1); | |
412 | break; | |
413 | case TCG_COND_GTU: | |
414 | result = (u0 > u1); | |
415 | break; | |
416 | default: | |
417 | TODO(); | |
418 | } | |
419 | return result; | |
420 | } | |
421 | ||
76782fab RH |
422 | #ifdef CONFIG_SOFTMMU |
423 | # define mmuidx tci_read_i(&tb_ptr) | |
424 | # define qemu_ld_ub \ | |
425 | helper_ret_ldub_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
426 | # define qemu_ld_leuw \ | |
427 | helper_le_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
428 | # define qemu_ld_leul \ | |
429 | helper_le_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
430 | # define qemu_ld_leq \ | |
431 | helper_le_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
432 | # define qemu_ld_beuw \ | |
433 | helper_be_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
434 | # define qemu_ld_beul \ | |
435 | helper_be_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
436 | # define qemu_ld_beq \ | |
437 | helper_be_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr) | |
438 | # define qemu_st_b(X) \ | |
439 | helper_ret_stb_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
440 | # define qemu_st_lew(X) \ | |
441 | helper_le_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
442 | # define qemu_st_lel(X) \ | |
443 | helper_le_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
444 | # define qemu_st_leq(X) \ | |
445 | helper_le_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
446 | # define qemu_st_bew(X) \ | |
447 | helper_be_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
448 | # define qemu_st_bel(X) \ | |
449 | helper_be_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
450 | # define qemu_st_beq(X) \ | |
451 | helper_be_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr) | |
452 | #else | |
453 | # define qemu_ld_ub ldub_p(g2h(taddr)) | |
454 | # define qemu_ld_leuw lduw_le_p(g2h(taddr)) | |
455 | # define qemu_ld_leul (uint32_t)ldl_le_p(g2h(taddr)) | |
456 | # define qemu_ld_leq ldq_le_p(g2h(taddr)) | |
457 | # define qemu_ld_beuw lduw_be_p(g2h(taddr)) | |
458 | # define qemu_ld_beul (uint32_t)ldl_be_p(g2h(taddr)) | |
459 | # define qemu_ld_beq ldq_be_p(g2h(taddr)) | |
460 | # define qemu_st_b(X) stb_p(g2h(taddr), X) | |
461 | # define qemu_st_lew(X) stw_le_p(g2h(taddr), X) | |
462 | # define qemu_st_lel(X) stl_le_p(g2h(taddr), X) | |
463 | # define qemu_st_leq(X) stq_le_p(g2h(taddr), X) | |
464 | # define qemu_st_bew(X) stw_be_p(g2h(taddr), X) | |
465 | # define qemu_st_bel(X) stl_be_p(g2h(taddr), X) | |
466 | # define qemu_st_beq(X) stq_be_p(g2h(taddr), X) | |
467 | #endif | |
468 | ||
7657f4bf | 469 | /* Interpret pseudo code in tb. */ |
04d5a1da | 470 | uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr) |
7657f4bf | 471 | { |
ee79c356 RH |
472 | long tcg_temps[CPU_TEMP_BUF_NLONGS]; |
473 | uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS); | |
04d5a1da | 474 | uintptr_t next_tb = 0; |
7657f4bf | 475 | |
7657f4bf | 476 | tci_reg[TCG_AREG0] = (tcg_target_ulong)env; |
ee79c356 | 477 | tci_reg[TCG_REG_CALL_STACK] = sp_value; |
7657f4bf SW |
478 | assert(tb_ptr); |
479 | ||
480 | for (;;) { | |
7657f4bf SW |
481 | TCGOpcode opc = tb_ptr[0]; |
482 | #if !defined(NDEBUG) | |
483 | uint8_t op_size = tb_ptr[1]; | |
484 | uint8_t *old_code_ptr = tb_ptr; | |
485 | #endif | |
486 | tcg_target_ulong t0; | |
487 | tcg_target_ulong t1; | |
488 | tcg_target_ulong t2; | |
489 | tcg_target_ulong label; | |
490 | TCGCond condition; | |
491 | target_ulong taddr; | |
7657f4bf SW |
492 | uint8_t tmp8; |
493 | uint16_t tmp16; | |
494 | uint32_t tmp32; | |
495 | uint64_t tmp64; | |
496 | #if TCG_TARGET_REG_BITS == 32 | |
497 | uint64_t v64; | |
498 | #endif | |
76782fab | 499 | TCGMemOp memop; |
7657f4bf | 500 | |
dea8fde8 RH |
501 | #if defined(GETPC) |
502 | tci_tb_ptr = (uintptr_t)tb_ptr; | |
503 | #endif | |
504 | ||
7657f4bf SW |
505 | /* Skip opcode and size entry. */ |
506 | tb_ptr += 2; | |
507 | ||
508 | switch (opc) { | |
7657f4bf SW |
509 | case INDEX_op_call: |
510 | t0 = tci_read_ri(&tb_ptr); | |
511 | #if TCG_TARGET_REG_BITS == 32 | |
512 | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), | |
513 | tci_read_reg(TCG_REG_R1), | |
514 | tci_read_reg(TCG_REG_R2), | |
515 | tci_read_reg(TCG_REG_R3), | |
516 | tci_read_reg(TCG_REG_R5), | |
517 | tci_read_reg(TCG_REG_R6), | |
518 | tci_read_reg(TCG_REG_R7), | |
6673f47d SW |
519 | tci_read_reg(TCG_REG_R8), |
520 | tci_read_reg(TCG_REG_R9), | |
521 | tci_read_reg(TCG_REG_R10)); | |
7657f4bf SW |
522 | tci_write_reg(TCG_REG_R0, tmp64); |
523 | tci_write_reg(TCG_REG_R1, tmp64 >> 32); | |
524 | #else | |
525 | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), | |
526 | tci_read_reg(TCG_REG_R1), | |
527 | tci_read_reg(TCG_REG_R2), | |
6673f47d SW |
528 | tci_read_reg(TCG_REG_R3), |
529 | tci_read_reg(TCG_REG_R5)); | |
7657f4bf SW |
530 | tci_write_reg(TCG_REG_R0, tmp64); |
531 | #endif | |
532 | break; | |
7657f4bf SW |
533 | case INDEX_op_br: |
534 | label = tci_read_label(&tb_ptr); | |
535 | assert(tb_ptr == old_code_ptr + op_size); | |
536 | tb_ptr = (uint8_t *)label; | |
537 | continue; | |
538 | case INDEX_op_setcond_i32: | |
539 | t0 = *tb_ptr++; | |
540 | t1 = tci_read_r32(&tb_ptr); | |
541 | t2 = tci_read_ri32(&tb_ptr); | |
542 | condition = *tb_ptr++; | |
543 | tci_write_reg32(t0, tci_compare32(t1, t2, condition)); | |
544 | break; | |
545 | #if TCG_TARGET_REG_BITS == 32 | |
546 | case INDEX_op_setcond2_i32: | |
547 | t0 = *tb_ptr++; | |
548 | tmp64 = tci_read_r64(&tb_ptr); | |
549 | v64 = tci_read_ri64(&tb_ptr); | |
550 | condition = *tb_ptr++; | |
551 | tci_write_reg32(t0, tci_compare64(tmp64, v64, condition)); | |
552 | break; | |
553 | #elif TCG_TARGET_REG_BITS == 64 | |
554 | case INDEX_op_setcond_i64: | |
555 | t0 = *tb_ptr++; | |
556 | t1 = tci_read_r64(&tb_ptr); | |
557 | t2 = tci_read_ri64(&tb_ptr); | |
558 | condition = *tb_ptr++; | |
559 | tci_write_reg64(t0, tci_compare64(t1, t2, condition)); | |
560 | break; | |
561 | #endif | |
562 | case INDEX_op_mov_i32: | |
563 | t0 = *tb_ptr++; | |
564 | t1 = tci_read_r32(&tb_ptr); | |
565 | tci_write_reg32(t0, t1); | |
566 | break; | |
567 | case INDEX_op_movi_i32: | |
568 | t0 = *tb_ptr++; | |
569 | t1 = tci_read_i32(&tb_ptr); | |
570 | tci_write_reg32(t0, t1); | |
571 | break; | |
572 | ||
573 | /* Load/store operations (32 bit). */ | |
574 | ||
575 | case INDEX_op_ld8u_i32: | |
576 | t0 = *tb_ptr++; | |
577 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 578 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
579 | tci_write_reg8(t0, *(uint8_t *)(t1 + t2)); |
580 | break; | |
581 | case INDEX_op_ld8s_i32: | |
582 | case INDEX_op_ld16u_i32: | |
583 | TODO(); | |
584 | break; | |
585 | case INDEX_op_ld16s_i32: | |
586 | TODO(); | |
587 | break; | |
588 | case INDEX_op_ld_i32: | |
589 | t0 = *tb_ptr++; | |
590 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 591 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
592 | tci_write_reg32(t0, *(uint32_t *)(t1 + t2)); |
593 | break; | |
594 | case INDEX_op_st8_i32: | |
595 | t0 = tci_read_r8(&tb_ptr); | |
596 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 597 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
598 | *(uint8_t *)(t1 + t2) = t0; |
599 | break; | |
600 | case INDEX_op_st16_i32: | |
601 | t0 = tci_read_r16(&tb_ptr); | |
602 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 603 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
604 | *(uint16_t *)(t1 + t2) = t0; |
605 | break; | |
606 | case INDEX_op_st_i32: | |
607 | t0 = tci_read_r32(&tb_ptr); | |
608 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 609 | t2 = tci_read_s32(&tb_ptr); |
ee79c356 | 610 | assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
611 | *(uint32_t *)(t1 + t2) = t0; |
612 | break; | |
613 | ||
614 | /* Arithmetic operations (32 bit). */ | |
615 | ||
616 | case INDEX_op_add_i32: | |
617 | t0 = *tb_ptr++; | |
618 | t1 = tci_read_ri32(&tb_ptr); | |
619 | t2 = tci_read_ri32(&tb_ptr); | |
620 | tci_write_reg32(t0, t1 + t2); | |
621 | break; | |
622 | case INDEX_op_sub_i32: | |
623 | t0 = *tb_ptr++; | |
624 | t1 = tci_read_ri32(&tb_ptr); | |
625 | t2 = tci_read_ri32(&tb_ptr); | |
626 | tci_write_reg32(t0, t1 - t2); | |
627 | break; | |
628 | case INDEX_op_mul_i32: | |
629 | t0 = *tb_ptr++; | |
630 | t1 = tci_read_ri32(&tb_ptr); | |
631 | t2 = tci_read_ri32(&tb_ptr); | |
632 | tci_write_reg32(t0, t1 * t2); | |
633 | break; | |
634 | #if TCG_TARGET_HAS_div_i32 | |
635 | case INDEX_op_div_i32: | |
636 | t0 = *tb_ptr++; | |
637 | t1 = tci_read_ri32(&tb_ptr); | |
638 | t2 = tci_read_ri32(&tb_ptr); | |
639 | tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2); | |
640 | break; | |
641 | case INDEX_op_divu_i32: | |
642 | t0 = *tb_ptr++; | |
643 | t1 = tci_read_ri32(&tb_ptr); | |
644 | t2 = tci_read_ri32(&tb_ptr); | |
645 | tci_write_reg32(t0, t1 / t2); | |
646 | break; | |
647 | case INDEX_op_rem_i32: | |
648 | t0 = *tb_ptr++; | |
649 | t1 = tci_read_ri32(&tb_ptr); | |
650 | t2 = tci_read_ri32(&tb_ptr); | |
651 | tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2); | |
652 | break; | |
653 | case INDEX_op_remu_i32: | |
654 | t0 = *tb_ptr++; | |
655 | t1 = tci_read_ri32(&tb_ptr); | |
656 | t2 = tci_read_ri32(&tb_ptr); | |
657 | tci_write_reg32(t0, t1 % t2); | |
658 | break; | |
659 | #elif TCG_TARGET_HAS_div2_i32 | |
660 | case INDEX_op_div2_i32: | |
661 | case INDEX_op_divu2_i32: | |
662 | TODO(); | |
663 | break; | |
664 | #endif | |
665 | case INDEX_op_and_i32: | |
666 | t0 = *tb_ptr++; | |
667 | t1 = tci_read_ri32(&tb_ptr); | |
668 | t2 = tci_read_ri32(&tb_ptr); | |
669 | tci_write_reg32(t0, t1 & t2); | |
670 | break; | |
671 | case INDEX_op_or_i32: | |
672 | t0 = *tb_ptr++; | |
673 | t1 = tci_read_ri32(&tb_ptr); | |
674 | t2 = tci_read_ri32(&tb_ptr); | |
675 | tci_write_reg32(t0, t1 | t2); | |
676 | break; | |
677 | case INDEX_op_xor_i32: | |
678 | t0 = *tb_ptr++; | |
679 | t1 = tci_read_ri32(&tb_ptr); | |
680 | t2 = tci_read_ri32(&tb_ptr); | |
681 | tci_write_reg32(t0, t1 ^ t2); | |
682 | break; | |
683 | ||
684 | /* Shift/rotate operations (32 bit). */ | |
685 | ||
686 | case INDEX_op_shl_i32: | |
687 | t0 = *tb_ptr++; | |
688 | t1 = tci_read_ri32(&tb_ptr); | |
689 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 690 | tci_write_reg32(t0, t1 << (t2 & 31)); |
7657f4bf SW |
691 | break; |
692 | case INDEX_op_shr_i32: | |
693 | t0 = *tb_ptr++; | |
694 | t1 = tci_read_ri32(&tb_ptr); | |
695 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 696 | tci_write_reg32(t0, t1 >> (t2 & 31)); |
7657f4bf SW |
697 | break; |
698 | case INDEX_op_sar_i32: | |
699 | t0 = *tb_ptr++; | |
700 | t1 = tci_read_ri32(&tb_ptr); | |
701 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 702 | tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31))); |
7657f4bf SW |
703 | break; |
704 | #if TCG_TARGET_HAS_rot_i32 | |
705 | case INDEX_op_rotl_i32: | |
706 | t0 = *tb_ptr++; | |
707 | t1 = tci_read_ri32(&tb_ptr); | |
708 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 709 | tci_write_reg32(t0, rol32(t1, t2 & 31)); |
7657f4bf SW |
710 | break; |
711 | case INDEX_op_rotr_i32: | |
712 | t0 = *tb_ptr++; | |
713 | t1 = tci_read_ri32(&tb_ptr); | |
714 | t2 = tci_read_ri32(&tb_ptr); | |
1976ccce | 715 | tci_write_reg32(t0, ror32(t1, t2 & 31)); |
7657f4bf | 716 | break; |
e24dc9fe SW |
717 | #endif |
718 | #if TCG_TARGET_HAS_deposit_i32 | |
719 | case INDEX_op_deposit_i32: | |
720 | t0 = *tb_ptr++; | |
721 | t1 = tci_read_r32(&tb_ptr); | |
722 | t2 = tci_read_r32(&tb_ptr); | |
723 | tmp16 = *tb_ptr++; | |
724 | tmp8 = *tb_ptr++; | |
725 | tmp32 = (((1 << tmp8) - 1) << tmp16); | |
726 | tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32)); | |
727 | break; | |
7657f4bf SW |
728 | #endif |
729 | case INDEX_op_brcond_i32: | |
730 | t0 = tci_read_r32(&tb_ptr); | |
731 | t1 = tci_read_ri32(&tb_ptr); | |
732 | condition = *tb_ptr++; | |
733 | label = tci_read_label(&tb_ptr); | |
734 | if (tci_compare32(t0, t1, condition)) { | |
735 | assert(tb_ptr == old_code_ptr + op_size); | |
736 | tb_ptr = (uint8_t *)label; | |
737 | continue; | |
738 | } | |
739 | break; | |
740 | #if TCG_TARGET_REG_BITS == 32 | |
741 | case INDEX_op_add2_i32: | |
742 | t0 = *tb_ptr++; | |
743 | t1 = *tb_ptr++; | |
744 | tmp64 = tci_read_r64(&tb_ptr); | |
745 | tmp64 += tci_read_r64(&tb_ptr); | |
746 | tci_write_reg64(t1, t0, tmp64); | |
747 | break; | |
748 | case INDEX_op_sub2_i32: | |
749 | t0 = *tb_ptr++; | |
750 | t1 = *tb_ptr++; | |
751 | tmp64 = tci_read_r64(&tb_ptr); | |
752 | tmp64 -= tci_read_r64(&tb_ptr); | |
753 | tci_write_reg64(t1, t0, tmp64); | |
754 | break; | |
755 | case INDEX_op_brcond2_i32: | |
756 | tmp64 = tci_read_r64(&tb_ptr); | |
757 | v64 = tci_read_ri64(&tb_ptr); | |
758 | condition = *tb_ptr++; | |
759 | label = tci_read_label(&tb_ptr); | |
760 | if (tci_compare64(tmp64, v64, condition)) { | |
761 | assert(tb_ptr == old_code_ptr + op_size); | |
762 | tb_ptr = (uint8_t *)label; | |
763 | continue; | |
764 | } | |
765 | break; | |
766 | case INDEX_op_mulu2_i32: | |
767 | t0 = *tb_ptr++; | |
768 | t1 = *tb_ptr++; | |
769 | t2 = tci_read_r32(&tb_ptr); | |
770 | tmp64 = tci_read_r32(&tb_ptr); | |
771 | tci_write_reg64(t1, t0, t2 * tmp64); | |
772 | break; | |
773 | #endif /* TCG_TARGET_REG_BITS == 32 */ | |
774 | #if TCG_TARGET_HAS_ext8s_i32 | |
775 | case INDEX_op_ext8s_i32: | |
776 | t0 = *tb_ptr++; | |
777 | t1 = tci_read_r8s(&tb_ptr); | |
778 | tci_write_reg32(t0, t1); | |
779 | break; | |
780 | #endif | |
781 | #if TCG_TARGET_HAS_ext16s_i32 | |
782 | case INDEX_op_ext16s_i32: | |
783 | t0 = *tb_ptr++; | |
784 | t1 = tci_read_r16s(&tb_ptr); | |
785 | tci_write_reg32(t0, t1); | |
786 | break; | |
787 | #endif | |
788 | #if TCG_TARGET_HAS_ext8u_i32 | |
789 | case INDEX_op_ext8u_i32: | |
790 | t0 = *tb_ptr++; | |
791 | t1 = tci_read_r8(&tb_ptr); | |
792 | tci_write_reg32(t0, t1); | |
793 | break; | |
794 | #endif | |
795 | #if TCG_TARGET_HAS_ext16u_i32 | |
796 | case INDEX_op_ext16u_i32: | |
797 | t0 = *tb_ptr++; | |
798 | t1 = tci_read_r16(&tb_ptr); | |
799 | tci_write_reg32(t0, t1); | |
800 | break; | |
801 | #endif | |
802 | #if TCG_TARGET_HAS_bswap16_i32 | |
803 | case INDEX_op_bswap16_i32: | |
804 | t0 = *tb_ptr++; | |
805 | t1 = tci_read_r16(&tb_ptr); | |
806 | tci_write_reg32(t0, bswap16(t1)); | |
807 | break; | |
808 | #endif | |
809 | #if TCG_TARGET_HAS_bswap32_i32 | |
810 | case INDEX_op_bswap32_i32: | |
811 | t0 = *tb_ptr++; | |
812 | t1 = tci_read_r32(&tb_ptr); | |
813 | tci_write_reg32(t0, bswap32(t1)); | |
814 | break; | |
815 | #endif | |
816 | #if TCG_TARGET_HAS_not_i32 | |
817 | case INDEX_op_not_i32: | |
818 | t0 = *tb_ptr++; | |
819 | t1 = tci_read_r32(&tb_ptr); | |
820 | tci_write_reg32(t0, ~t1); | |
821 | break; | |
822 | #endif | |
823 | #if TCG_TARGET_HAS_neg_i32 | |
824 | case INDEX_op_neg_i32: | |
825 | t0 = *tb_ptr++; | |
826 | t1 = tci_read_r32(&tb_ptr); | |
827 | tci_write_reg32(t0, -t1); | |
828 | break; | |
829 | #endif | |
830 | #if TCG_TARGET_REG_BITS == 64 | |
831 | case INDEX_op_mov_i64: | |
832 | t0 = *tb_ptr++; | |
833 | t1 = tci_read_r64(&tb_ptr); | |
834 | tci_write_reg64(t0, t1); | |
835 | break; | |
836 | case INDEX_op_movi_i64: | |
837 | t0 = *tb_ptr++; | |
838 | t1 = tci_read_i64(&tb_ptr); | |
839 | tci_write_reg64(t0, t1); | |
840 | break; | |
841 | ||
842 | /* Load/store operations (64 bit). */ | |
843 | ||
844 | case INDEX_op_ld8u_i64: | |
845 | t0 = *tb_ptr++; | |
846 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 847 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
848 | tci_write_reg8(t0, *(uint8_t *)(t1 + t2)); |
849 | break; | |
850 | case INDEX_op_ld8s_i64: | |
851 | case INDEX_op_ld16u_i64: | |
852 | case INDEX_op_ld16s_i64: | |
853 | TODO(); | |
854 | break; | |
855 | case INDEX_op_ld32u_i64: | |
856 | t0 = *tb_ptr++; | |
857 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 858 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
859 | tci_write_reg32(t0, *(uint32_t *)(t1 + t2)); |
860 | break; | |
861 | case INDEX_op_ld32s_i64: | |
862 | t0 = *tb_ptr++; | |
863 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 864 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
865 | tci_write_reg32s(t0, *(int32_t *)(t1 + t2)); |
866 | break; | |
867 | case INDEX_op_ld_i64: | |
868 | t0 = *tb_ptr++; | |
869 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 870 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
871 | tci_write_reg64(t0, *(uint64_t *)(t1 + t2)); |
872 | break; | |
873 | case INDEX_op_st8_i64: | |
874 | t0 = tci_read_r8(&tb_ptr); | |
875 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 876 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
877 | *(uint8_t *)(t1 + t2) = t0; |
878 | break; | |
879 | case INDEX_op_st16_i64: | |
880 | t0 = tci_read_r16(&tb_ptr); | |
881 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 882 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
883 | *(uint16_t *)(t1 + t2) = t0; |
884 | break; | |
885 | case INDEX_op_st32_i64: | |
886 | t0 = tci_read_r32(&tb_ptr); | |
887 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 888 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
889 | *(uint32_t *)(t1 + t2) = t0; |
890 | break; | |
891 | case INDEX_op_st_i64: | |
892 | t0 = tci_read_r64(&tb_ptr); | |
893 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 894 | t2 = tci_read_s32(&tb_ptr); |
ee79c356 | 895 | assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
896 | *(uint64_t *)(t1 + t2) = t0; |
897 | break; | |
898 | ||
899 | /* Arithmetic operations (64 bit). */ | |
900 | ||
901 | case INDEX_op_add_i64: | |
902 | t0 = *tb_ptr++; | |
903 | t1 = tci_read_ri64(&tb_ptr); | |
904 | t2 = tci_read_ri64(&tb_ptr); | |
905 | tci_write_reg64(t0, t1 + t2); | |
906 | break; | |
907 | case INDEX_op_sub_i64: | |
908 | t0 = *tb_ptr++; | |
909 | t1 = tci_read_ri64(&tb_ptr); | |
910 | t2 = tci_read_ri64(&tb_ptr); | |
911 | tci_write_reg64(t0, t1 - t2); | |
912 | break; | |
913 | case INDEX_op_mul_i64: | |
914 | t0 = *tb_ptr++; | |
915 | t1 = tci_read_ri64(&tb_ptr); | |
916 | t2 = tci_read_ri64(&tb_ptr); | |
917 | tci_write_reg64(t0, t1 * t2); | |
918 | break; | |
919 | #if TCG_TARGET_HAS_div_i64 | |
920 | case INDEX_op_div_i64: | |
921 | case INDEX_op_divu_i64: | |
922 | case INDEX_op_rem_i64: | |
923 | case INDEX_op_remu_i64: | |
924 | TODO(); | |
925 | break; | |
926 | #elif TCG_TARGET_HAS_div2_i64 | |
927 | case INDEX_op_div2_i64: | |
928 | case INDEX_op_divu2_i64: | |
929 | TODO(); | |
930 | break; | |
931 | #endif | |
932 | case INDEX_op_and_i64: | |
933 | t0 = *tb_ptr++; | |
934 | t1 = tci_read_ri64(&tb_ptr); | |
935 | t2 = tci_read_ri64(&tb_ptr); | |
936 | tci_write_reg64(t0, t1 & t2); | |
937 | break; | |
938 | case INDEX_op_or_i64: | |
939 | t0 = *tb_ptr++; | |
940 | t1 = tci_read_ri64(&tb_ptr); | |
941 | t2 = tci_read_ri64(&tb_ptr); | |
942 | tci_write_reg64(t0, t1 | t2); | |
943 | break; | |
944 | case INDEX_op_xor_i64: | |
945 | t0 = *tb_ptr++; | |
946 | t1 = tci_read_ri64(&tb_ptr); | |
947 | t2 = tci_read_ri64(&tb_ptr); | |
948 | tci_write_reg64(t0, t1 ^ t2); | |
949 | break; | |
950 | ||
951 | /* Shift/rotate operations (64 bit). */ | |
952 | ||
953 | case INDEX_op_shl_i64: | |
954 | t0 = *tb_ptr++; | |
955 | t1 = tci_read_ri64(&tb_ptr); | |
956 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 957 | tci_write_reg64(t0, t1 << (t2 & 63)); |
7657f4bf SW |
958 | break; |
959 | case INDEX_op_shr_i64: | |
960 | t0 = *tb_ptr++; | |
961 | t1 = tci_read_ri64(&tb_ptr); | |
962 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 963 | tci_write_reg64(t0, t1 >> (t2 & 63)); |
7657f4bf SW |
964 | break; |
965 | case INDEX_op_sar_i64: | |
966 | t0 = *tb_ptr++; | |
967 | t1 = tci_read_ri64(&tb_ptr); | |
968 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 969 | tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63))); |
7657f4bf SW |
970 | break; |
971 | #if TCG_TARGET_HAS_rot_i64 | |
972 | case INDEX_op_rotl_i64: | |
d285bf78 SW |
973 | t0 = *tb_ptr++; |
974 | t1 = tci_read_ri64(&tb_ptr); | |
975 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 976 | tci_write_reg64(t0, rol64(t1, t2 & 63)); |
d285bf78 | 977 | break; |
7657f4bf | 978 | case INDEX_op_rotr_i64: |
d285bf78 SW |
979 | t0 = *tb_ptr++; |
980 | t1 = tci_read_ri64(&tb_ptr); | |
981 | t2 = tci_read_ri64(&tb_ptr); | |
1976ccce | 982 | tci_write_reg64(t0, ror64(t1, t2 & 63)); |
7657f4bf | 983 | break; |
e24dc9fe SW |
984 | #endif |
985 | #if TCG_TARGET_HAS_deposit_i64 | |
986 | case INDEX_op_deposit_i64: | |
987 | t0 = *tb_ptr++; | |
988 | t1 = tci_read_r64(&tb_ptr); | |
989 | t2 = tci_read_r64(&tb_ptr); | |
990 | tmp16 = *tb_ptr++; | |
991 | tmp8 = *tb_ptr++; | |
992 | tmp64 = (((1ULL << tmp8) - 1) << tmp16); | |
993 | tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64)); | |
994 | break; | |
7657f4bf SW |
995 | #endif |
996 | case INDEX_op_brcond_i64: | |
997 | t0 = tci_read_r64(&tb_ptr); | |
998 | t1 = tci_read_ri64(&tb_ptr); | |
999 | condition = *tb_ptr++; | |
1000 | label = tci_read_label(&tb_ptr); | |
1001 | if (tci_compare64(t0, t1, condition)) { | |
1002 | assert(tb_ptr == old_code_ptr + op_size); | |
1003 | tb_ptr = (uint8_t *)label; | |
1004 | continue; | |
1005 | } | |
1006 | break; | |
1007 | #if TCG_TARGET_HAS_ext8u_i64 | |
1008 | case INDEX_op_ext8u_i64: | |
1009 | t0 = *tb_ptr++; | |
1010 | t1 = tci_read_r8(&tb_ptr); | |
1011 | tci_write_reg64(t0, t1); | |
1012 | break; | |
1013 | #endif | |
1014 | #if TCG_TARGET_HAS_ext8s_i64 | |
1015 | case INDEX_op_ext8s_i64: | |
1016 | t0 = *tb_ptr++; | |
1017 | t1 = tci_read_r8s(&tb_ptr); | |
1018 | tci_write_reg64(t0, t1); | |
1019 | break; | |
1020 | #endif | |
1021 | #if TCG_TARGET_HAS_ext16s_i64 | |
1022 | case INDEX_op_ext16s_i64: | |
1023 | t0 = *tb_ptr++; | |
1024 | t1 = tci_read_r16s(&tb_ptr); | |
1025 | tci_write_reg64(t0, t1); | |
1026 | break; | |
1027 | #endif | |
1028 | #if TCG_TARGET_HAS_ext16u_i64 | |
1029 | case INDEX_op_ext16u_i64: | |
1030 | t0 = *tb_ptr++; | |
1031 | t1 = tci_read_r16(&tb_ptr); | |
1032 | tci_write_reg64(t0, t1); | |
1033 | break; | |
1034 | #endif | |
1035 | #if TCG_TARGET_HAS_ext32s_i64 | |
1036 | case INDEX_op_ext32s_i64: | |
1037 | t0 = *tb_ptr++; | |
1038 | t1 = tci_read_r32s(&tb_ptr); | |
1039 | tci_write_reg64(t0, t1); | |
1040 | break; | |
1041 | #endif | |
1042 | #if TCG_TARGET_HAS_ext32u_i64 | |
1043 | case INDEX_op_ext32u_i64: | |
1044 | t0 = *tb_ptr++; | |
1045 | t1 = tci_read_r32(&tb_ptr); | |
1046 | tci_write_reg64(t0, t1); | |
1047 | break; | |
1048 | #endif | |
1049 | #if TCG_TARGET_HAS_bswap16_i64 | |
1050 | case INDEX_op_bswap16_i64: | |
1051 | TODO(); | |
1052 | t0 = *tb_ptr++; | |
1053 | t1 = tci_read_r16(&tb_ptr); | |
1054 | tci_write_reg64(t0, bswap16(t1)); | |
1055 | break; | |
1056 | #endif | |
1057 | #if TCG_TARGET_HAS_bswap32_i64 | |
1058 | case INDEX_op_bswap32_i64: | |
1059 | t0 = *tb_ptr++; | |
1060 | t1 = tci_read_r32(&tb_ptr); | |
1061 | tci_write_reg64(t0, bswap32(t1)); | |
1062 | break; | |
1063 | #endif | |
1064 | #if TCG_TARGET_HAS_bswap64_i64 | |
1065 | case INDEX_op_bswap64_i64: | |
7657f4bf SW |
1066 | t0 = *tb_ptr++; |
1067 | t1 = tci_read_r64(&tb_ptr); | |
1068 | tci_write_reg64(t0, bswap64(t1)); | |
1069 | break; | |
1070 | #endif | |
1071 | #if TCG_TARGET_HAS_not_i64 | |
1072 | case INDEX_op_not_i64: | |
1073 | t0 = *tb_ptr++; | |
1074 | t1 = tci_read_r64(&tb_ptr); | |
1075 | tci_write_reg64(t0, ~t1); | |
1076 | break; | |
1077 | #endif | |
1078 | #if TCG_TARGET_HAS_neg_i64 | |
1079 | case INDEX_op_neg_i64: | |
1080 | t0 = *tb_ptr++; | |
1081 | t1 = tci_read_r64(&tb_ptr); | |
1082 | tci_write_reg64(t0, -t1); | |
1083 | break; | |
1084 | #endif | |
1085 | #endif /* TCG_TARGET_REG_BITS == 64 */ | |
1086 | ||
1087 | /* QEMU specific operations. */ | |
1088 | ||
1089 | #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS | |
1090 | case INDEX_op_debug_insn_start: | |
1091 | TODO(); | |
1092 | break; | |
1093 | #else | |
1094 | case INDEX_op_debug_insn_start: | |
1095 | TODO(); | |
1096 | break; | |
1097 | #endif | |
1098 | case INDEX_op_exit_tb: | |
1099 | next_tb = *(uint64_t *)tb_ptr; | |
1100 | goto exit; | |
1101 | break; | |
1102 | case INDEX_op_goto_tb: | |
1103 | t0 = tci_read_i32(&tb_ptr); | |
1104 | assert(tb_ptr == old_code_ptr + op_size); | |
1105 | tb_ptr += (int32_t)t0; | |
1106 | continue; | |
76782fab | 1107 | case INDEX_op_qemu_ld_i32: |
7657f4bf SW |
1108 | t0 = *tb_ptr++; |
1109 | taddr = tci_read_ulong(&tb_ptr); | |
76782fab RH |
1110 | memop = tci_read_i(&tb_ptr); |
1111 | switch (memop) { | |
1112 | case MO_UB: | |
1113 | tmp32 = qemu_ld_ub; | |
1114 | break; | |
1115 | case MO_SB: | |
1116 | tmp32 = (int8_t)qemu_ld_ub; | |
1117 | break; | |
1118 | case MO_LEUW: | |
1119 | tmp32 = qemu_ld_leuw; | |
1120 | break; | |
1121 | case MO_LESW: | |
1122 | tmp32 = (int16_t)qemu_ld_leuw; | |
1123 | break; | |
1124 | case MO_LEUL: | |
1125 | tmp32 = qemu_ld_leul; | |
1126 | break; | |
1127 | case MO_BEUW: | |
1128 | tmp32 = qemu_ld_beuw; | |
1129 | break; | |
1130 | case MO_BESW: | |
1131 | tmp32 = (int16_t)qemu_ld_beuw; | |
1132 | break; | |
1133 | case MO_BEUL: | |
1134 | tmp32 = qemu_ld_beul; | |
1135 | break; | |
1136 | default: | |
1137 | tcg_abort(); | |
1138 | } | |
1139 | tci_write_reg(t0, tmp32); | |
7657f4bf | 1140 | break; |
76782fab | 1141 | case INDEX_op_qemu_ld_i64: |
7657f4bf | 1142 | t0 = *tb_ptr++; |
76782fab RH |
1143 | if (TCG_TARGET_REG_BITS == 32) { |
1144 | t1 = *tb_ptr++; | |
1145 | } | |
7657f4bf | 1146 | taddr = tci_read_ulong(&tb_ptr); |
76782fab RH |
1147 | memop = tci_read_i(&tb_ptr); |
1148 | switch (memop) { | |
1149 | case MO_UB: | |
1150 | tmp64 = qemu_ld_ub; | |
1151 | break; | |
1152 | case MO_SB: | |
1153 | tmp64 = (int8_t)qemu_ld_ub; | |
1154 | break; | |
1155 | case MO_LEUW: | |
1156 | tmp64 = qemu_ld_leuw; | |
1157 | break; | |
1158 | case MO_LESW: | |
1159 | tmp64 = (int16_t)qemu_ld_leuw; | |
1160 | break; | |
1161 | case MO_LEUL: | |
1162 | tmp64 = qemu_ld_leul; | |
1163 | break; | |
1164 | case MO_LESL: | |
1165 | tmp64 = (int32_t)qemu_ld_leul; | |
1166 | break; | |
1167 | case MO_LEQ: | |
1168 | tmp64 = qemu_ld_leq; | |
1169 | break; | |
1170 | case MO_BEUW: | |
1171 | tmp64 = qemu_ld_beuw; | |
1172 | break; | |
1173 | case MO_BESW: | |
1174 | tmp64 = (int16_t)qemu_ld_beuw; | |
1175 | break; | |
1176 | case MO_BEUL: | |
1177 | tmp64 = qemu_ld_beul; | |
1178 | break; | |
1179 | case MO_BESL: | |
1180 | tmp64 = (int32_t)qemu_ld_beul; | |
1181 | break; | |
1182 | case MO_BEQ: | |
1183 | tmp64 = qemu_ld_beq; | |
1184 | break; | |
1185 | default: | |
1186 | tcg_abort(); | |
1187 | } | |
7657f4bf | 1188 | tci_write_reg(t0, tmp64); |
76782fab RH |
1189 | if (TCG_TARGET_REG_BITS == 32) { |
1190 | tci_write_reg(t1, tmp64 >> 32); | |
1191 | } | |
7657f4bf | 1192 | break; |
76782fab RH |
1193 | case INDEX_op_qemu_st_i32: |
1194 | t0 = tci_read_r(&tb_ptr); | |
7657f4bf | 1195 | taddr = tci_read_ulong(&tb_ptr); |
76782fab RH |
1196 | memop = tci_read_i(&tb_ptr); |
1197 | switch (memop) { | |
1198 | case MO_UB: | |
1199 | qemu_st_b(t0); | |
1200 | break; | |
1201 | case MO_LEUW: | |
1202 | qemu_st_lew(t0); | |
1203 | break; | |
1204 | case MO_LEUL: | |
1205 | qemu_st_lel(t0); | |
1206 | break; | |
1207 | case MO_BEUW: | |
1208 | qemu_st_bew(t0); | |
1209 | break; | |
1210 | case MO_BEUL: | |
1211 | qemu_st_bel(t0); | |
1212 | break; | |
1213 | default: | |
1214 | tcg_abort(); | |
1215 | } | |
7657f4bf | 1216 | break; |
76782fab | 1217 | case INDEX_op_qemu_st_i64: |
7657f4bf SW |
1218 | tmp64 = tci_read_r64(&tb_ptr); |
1219 | taddr = tci_read_ulong(&tb_ptr); | |
76782fab RH |
1220 | memop = tci_read_i(&tb_ptr); |
1221 | switch (memop) { | |
1222 | case MO_UB: | |
1223 | qemu_st_b(tmp64); | |
1224 | break; | |
1225 | case MO_LEUW: | |
1226 | qemu_st_lew(tmp64); | |
1227 | break; | |
1228 | case MO_LEUL: | |
1229 | qemu_st_lel(tmp64); | |
1230 | break; | |
1231 | case MO_LEQ: | |
1232 | qemu_st_leq(tmp64); | |
1233 | break; | |
1234 | case MO_BEUW: | |
1235 | qemu_st_bew(tmp64); | |
1236 | break; | |
1237 | case MO_BEUL: | |
1238 | qemu_st_bel(tmp64); | |
1239 | break; | |
1240 | case MO_BEQ: | |
1241 | qemu_st_beq(tmp64); | |
1242 | break; | |
1243 | default: | |
1244 | tcg_abort(); | |
1245 | } | |
7657f4bf SW |
1246 | break; |
1247 | default: | |
1248 | TODO(); | |
1249 | break; | |
1250 | } | |
1251 | assert(tb_ptr == old_code_ptr + op_size); | |
1252 | } | |
1253 | exit: | |
1254 | return next_tb; | |
1255 | } |