]>
Commit | Line | Data |
---|---|---|
7657f4bf SW |
1 | /* |
2 | * Tiny Code Interpreter for QEMU | |
3 | * | |
4 | * Copyright (c) 2009, 2011 Stefan Weil | |
5 | * | |
6 | * This program is free software: you can redistribute it and/or modify | |
7 | * it under the terms of the GNU General Public License as published by | |
8 | * the Free Software Foundation, either version 2 of the License, or | |
9 | * (at your option) any later version. | |
10 | * | |
11 | * This program is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | * GNU General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU General Public License | |
17 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
18 | */ | |
19 | ||
20 | #include "config.h" | |
21 | ||
22 | /* Defining NDEBUG disables assertions (which makes the code faster). */ | |
17904bcf | 23 | #if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG) |
7657f4bf SW |
24 | # define NDEBUG |
25 | #endif | |
26 | ||
27 | #include "qemu-common.h" | |
022c62cb | 28 | #include "exec/exec-all.h" /* MAX_OPC_PARAM_IARGS */ |
7657f4bf SW |
29 | #include "tcg-op.h" |
30 | ||
/* Marker for missing code: prints file/line/function, then aborts via
   tcg_abort().  Wrapped in do/while (0) so it behaves as one statement. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* The helper_function prototypes below are written for exactly 5 input
   arguments; fail the build early if that constant ever changes. */
#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
/* 10 parameters: on a 32-bit host each of the 5 input arguments is
   presumably passed as a register pair — TODO confirm against tcg-target. */
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
/* 64-bit host: one register per input argument. */
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
53 | ||
/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
/* Address of the bytecode currently being interpreted; updated once per
   opcode in the dispatch loop so GETPC users can recover it. */
uintptr_t tci_tb_ptr;
#endif

/* The interpreter's virtual register file, indexed by TCGReg. */
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
61 | ||
771142c2 | 62 | static tcg_target_ulong tci_read_reg(TCGReg index) |
7657f4bf SW |
63 | { |
64 | assert(index < ARRAY_SIZE(tci_reg)); | |
65 | return tci_reg[index]; | |
66 | } | |
67 | ||
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read register REG as a sign-extended 8-bit value. */
static int8_t tci_read_reg8s(TCGReg reg)
{
    tcg_target_ulong raw = tci_read_reg(reg);
    return (int8_t)raw;
}
#endif
74 | ||
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read register REG as a sign-extended 16-bit value. */
static int16_t tci_read_reg16s(TCGReg reg)
{
    tcg_target_ulong raw = tci_read_reg(reg);
    return (int16_t)raw;
}
#endif
81 | ||
#if TCG_TARGET_REG_BITS == 64
/* Read register REG as a sign-extended 32-bit value. */
static int32_t tci_read_reg32s(TCGReg reg)
{
    tcg_target_ulong raw = tci_read_reg(reg);
    return (int32_t)raw;
}
#endif
88 | ||
771142c2 | 89 | static uint8_t tci_read_reg8(TCGReg index) |
7657f4bf SW |
90 | { |
91 | return (uint8_t)tci_read_reg(index); | |
92 | } | |
93 | ||
771142c2 | 94 | static uint16_t tci_read_reg16(TCGReg index) |
7657f4bf SW |
95 | { |
96 | return (uint16_t)tci_read_reg(index); | |
97 | } | |
98 | ||
771142c2 | 99 | static uint32_t tci_read_reg32(TCGReg index) |
7657f4bf SW |
100 | { |
101 | return (uint32_t)tci_read_reg(index); | |
102 | } | |
103 | ||
#if TCG_TARGET_REG_BITS == 64
/* Read register REG as a full 64-bit value (native width on this host). */
static uint64_t tci_read_reg64(TCGReg reg)
{
    return tci_read_reg(reg);
}
#endif
110 | ||
771142c2 | 111 | static void tci_write_reg(TCGReg index, tcg_target_ulong value) |
7657f4bf SW |
112 | { |
113 | assert(index < ARRAY_SIZE(tci_reg)); | |
114 | assert(index != TCG_AREG0); | |
ee79c356 | 115 | assert(index != TCG_REG_CALL_STACK); |
7657f4bf SW |
116 | tci_reg[index] = value; |
117 | } | |
118 | ||
771142c2 | 119 | static void tci_write_reg8s(TCGReg index, int8_t value) |
7657f4bf SW |
120 | { |
121 | tci_write_reg(index, value); | |
122 | } | |
123 | ||
771142c2 | 124 | static void tci_write_reg16s(TCGReg index, int16_t value) |
7657f4bf SW |
125 | { |
126 | tci_write_reg(index, value); | |
127 | } | |
128 | ||
#if TCG_TARGET_REG_BITS == 64
/* Store a sign-extended 32-bit value into register REG. */
static void tci_write_reg32s(TCGReg reg, int32_t val)
{
    tci_write_reg(reg, val);
}
#endif
135 | ||
771142c2 | 136 | static void tci_write_reg8(TCGReg index, uint8_t value) |
7657f4bf SW |
137 | { |
138 | tci_write_reg(index, value); | |
139 | } | |
140 | ||
771142c2 | 141 | static void tci_write_reg16(TCGReg index, uint16_t value) |
7657f4bf SW |
142 | { |
143 | tci_write_reg(index, value); | |
144 | } | |
145 | ||
771142c2 | 146 | static void tci_write_reg32(TCGReg index, uint32_t value) |
7657f4bf SW |
147 | { |
148 | tci_write_reg(index, value); | |
149 | } | |
150 | ||
#if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit value into a register pair: low half first, then the
   high half into the second register. */
static void tci_write_reg64(uint32_t hi_reg, uint32_t lo_reg,
                            uint64_t val)
{
    tci_write_reg(lo_reg, val);
    tci_write_reg(hi_reg, val >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
/* Store a 64-bit value into a single native-width register. */
static void tci_write_reg64(TCGReg reg, uint64_t val)
{
    tci_write_reg(reg, val);
}
#endif
164 | ||
165 | #if TCG_TARGET_REG_BITS == 32 | |
/* Combine two 32-bit halves into one 64-bit value.
 *
 * @high: most significant 32 bits
 * @low:  least significant 32 bits
 *
 * Uses bitwise OR rather than addition: the two operands occupy
 * disjoint bit ranges, so the result is identical, but OR makes the
 * intent (concatenation, no carry) explicit.
 */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) | low;
}
171 | #endif | |
172 | ||
173 | /* Read constant (native size) from bytecode. */ | |
174 | static tcg_target_ulong tci_read_i(uint8_t **tb_ptr) | |
175 | { | |
176 | tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr); | |
177 | *tb_ptr += sizeof(value); | |
178 | return value; | |
179 | } | |
180 | ||
/* Read unsigned constant (32 bit) from bytecode and advance the cursor. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    uint32_t value = *(uint32_t *)p;
    *tb_ptr = p + sizeof(value);
    return value;
}
188 | ||
/* Read signed constant (32 bit) from bytecode and advance the cursor. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    int32_t value = *(int32_t *)p;
    *tb_ptr = p + sizeof(value);
    return value;
}
196 | ||
7657f4bf SW |
#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode and advance the cursor. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint8_t *p = *tb_ptr;
    uint64_t value = *(uint64_t *)p;
    *tb_ptr = p + sizeof(value);
    return value;
}
#endif
206 | ||
207 | /* Read indexed register (native size) from bytecode. */ | |
208 | static tcg_target_ulong tci_read_r(uint8_t **tb_ptr) | |
209 | { | |
210 | tcg_target_ulong value = tci_read_reg(**tb_ptr); | |
211 | *tb_ptr += 1; | |
212 | return value; | |
213 | } | |
214 | ||
215 | /* Read indexed register (8 bit) from bytecode. */ | |
216 | static uint8_t tci_read_r8(uint8_t **tb_ptr) | |
217 | { | |
218 | uint8_t value = tci_read_reg8(**tb_ptr); | |
219 | *tb_ptr += 1; | |
220 | return value; | |
221 | } | |
222 | ||
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read one register index byte from bytecode; return the register's
   value sign-extended from 8 bits. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    TCGReg reg = **tb_ptr;
    *tb_ptr += 1;
    return tci_read_reg8s(reg);
}
#endif
232 | ||
233 | /* Read indexed register (16 bit) from bytecode. */ | |
234 | static uint16_t tci_read_r16(uint8_t **tb_ptr) | |
235 | { | |
236 | uint16_t value = tci_read_reg16(**tb_ptr); | |
237 | *tb_ptr += 1; | |
238 | return value; | |
239 | } | |
240 | ||
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read one register index byte from bytecode; return the register's
   value sign-extended from 16 bits. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    TCGReg reg = **tb_ptr;
    *tb_ptr += 1;
    return tci_read_reg16s(reg);
}
#endif
250 | ||
251 | /* Read indexed register (32 bit) from bytecode. */ | |
252 | static uint32_t tci_read_r32(uint8_t **tb_ptr) | |
253 | { | |
254 | uint32_t value = tci_read_reg32(**tb_ptr); | |
255 | *tb_ptr += 1; | |
256 | return value; | |
257 | } | |
258 | ||
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode.
   The low word's register index is encoded first, then the high word's;
   the two tci_read_r32() calls below must stay in this order. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
283 | ||
/* Read indexed register(s) with target address from bytecode.
   When guest addresses are wider than host registers, the address is
   split across two registers: the low part is encoded first, then the
   high part is read and shifted into the upper 32 bits. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
293 | ||
294 | /* Read indexed register or constant (native size) from bytecode. */ | |
295 | static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr) | |
296 | { | |
297 | tcg_target_ulong value; | |
771142c2 | 298 | TCGReg r = **tb_ptr; |
7657f4bf SW |
299 | *tb_ptr += 1; |
300 | if (r == TCG_CONST) { | |
301 | value = tci_read_i(tb_ptr); | |
302 | } else { | |
303 | value = tci_read_reg(r); | |
304 | } | |
305 | return value; | |
306 | } | |
307 | ||
308 | /* Read indexed register or constant (32 bit) from bytecode. */ | |
309 | static uint32_t tci_read_ri32(uint8_t **tb_ptr) | |
310 | { | |
311 | uint32_t value; | |
771142c2 | 312 | TCGReg r = **tb_ptr; |
7657f4bf SW |
313 | *tb_ptr += 1; |
314 | if (r == TCG_CONST) { | |
315 | value = tci_read_i32(tb_ptr); | |
316 | } else { | |
317 | value = tci_read_reg32(r); | |
318 | } | |
319 | return value; | |
320 | } | |
321 | ||
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode;
   the low word is encoded first. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_ri32(tb_ptr);
    uint32_t hi = tci_read_ri32(tb_ptr);
    return tci_uint64(hi, lo);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode.
   The special index TCG_CONST means an inline immediate follows. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        return tci_read_i64(tb_ptr);
    }
    return tci_read_reg64(r);
}
#endif
344 | ||
c6c5063c | 345 | static tcg_target_ulong tci_read_label(uint8_t **tb_ptr) |
7657f4bf | 346 | { |
c6c5063c | 347 | tcg_target_ulong label = tci_read_i(tb_ptr); |
7657f4bf SW |
348 | assert(label != 0); |
349 | return label; | |
350 | } | |
351 | ||
352 | static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition) | |
353 | { | |
354 | bool result = false; | |
355 | int32_t i0 = u0; | |
356 | int32_t i1 = u1; | |
357 | switch (condition) { | |
358 | case TCG_COND_EQ: | |
359 | result = (u0 == u1); | |
360 | break; | |
361 | case TCG_COND_NE: | |
362 | result = (u0 != u1); | |
363 | break; | |
364 | case TCG_COND_LT: | |
365 | result = (i0 < i1); | |
366 | break; | |
367 | case TCG_COND_GE: | |
368 | result = (i0 >= i1); | |
369 | break; | |
370 | case TCG_COND_LE: | |
371 | result = (i0 <= i1); | |
372 | break; | |
373 | case TCG_COND_GT: | |
374 | result = (i0 > i1); | |
375 | break; | |
376 | case TCG_COND_LTU: | |
377 | result = (u0 < u1); | |
378 | break; | |
379 | case TCG_COND_GEU: | |
380 | result = (u0 >= u1); | |
381 | break; | |
382 | case TCG_COND_LEU: | |
383 | result = (u0 <= u1); | |
384 | break; | |
385 | case TCG_COND_GTU: | |
386 | result = (u0 > u1); | |
387 | break; | |
388 | default: | |
389 | TODO(); | |
390 | } | |
391 | return result; | |
392 | } | |
393 | ||
394 | static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition) | |
395 | { | |
396 | bool result = false; | |
397 | int64_t i0 = u0; | |
398 | int64_t i1 = u1; | |
399 | switch (condition) { | |
400 | case TCG_COND_EQ: | |
401 | result = (u0 == u1); | |
402 | break; | |
403 | case TCG_COND_NE: | |
404 | result = (u0 != u1); | |
405 | break; | |
406 | case TCG_COND_LT: | |
407 | result = (i0 < i1); | |
408 | break; | |
409 | case TCG_COND_GE: | |
410 | result = (i0 >= i1); | |
411 | break; | |
412 | case TCG_COND_LE: | |
413 | result = (i0 <= i1); | |
414 | break; | |
415 | case TCG_COND_GT: | |
416 | result = (i0 > i1); | |
417 | break; | |
418 | case TCG_COND_LTU: | |
419 | result = (u0 < u1); | |
420 | break; | |
421 | case TCG_COND_GEU: | |
422 | result = (u0 >= u1); | |
423 | break; | |
424 | case TCG_COND_LEU: | |
425 | result = (u0 <= u1); | |
426 | break; | |
427 | case TCG_COND_GTU: | |
428 | result = (u0 > u1); | |
429 | break; | |
430 | default: | |
431 | TODO(); | |
432 | } | |
433 | return result; | |
434 | } | |
435 | ||
436 | /* Interpret pseudo code in tb. */ | |
04d5a1da | 437 | uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr) |
7657f4bf | 438 | { |
ee79c356 RH |
439 | long tcg_temps[CPU_TEMP_BUF_NLONGS]; |
440 | uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS); | |
04d5a1da | 441 | uintptr_t next_tb = 0; |
7657f4bf | 442 | |
7657f4bf | 443 | tci_reg[TCG_AREG0] = (tcg_target_ulong)env; |
ee79c356 | 444 | tci_reg[TCG_REG_CALL_STACK] = sp_value; |
7657f4bf SW |
445 | assert(tb_ptr); |
446 | ||
447 | for (;;) { | |
7657f4bf SW |
448 | TCGOpcode opc = tb_ptr[0]; |
449 | #if !defined(NDEBUG) | |
450 | uint8_t op_size = tb_ptr[1]; | |
451 | uint8_t *old_code_ptr = tb_ptr; | |
452 | #endif | |
453 | tcg_target_ulong t0; | |
454 | tcg_target_ulong t1; | |
455 | tcg_target_ulong t2; | |
456 | tcg_target_ulong label; | |
457 | TCGCond condition; | |
458 | target_ulong taddr; | |
459 | #ifndef CONFIG_SOFTMMU | |
460 | tcg_target_ulong host_addr; | |
461 | #endif | |
462 | uint8_t tmp8; | |
463 | uint16_t tmp16; | |
464 | uint32_t tmp32; | |
465 | uint64_t tmp64; | |
466 | #if TCG_TARGET_REG_BITS == 32 | |
467 | uint64_t v64; | |
468 | #endif | |
469 | ||
dea8fde8 RH |
470 | #if defined(GETPC) |
471 | tci_tb_ptr = (uintptr_t)tb_ptr; | |
472 | #endif | |
473 | ||
7657f4bf SW |
474 | /* Skip opcode and size entry. */ |
475 | tb_ptr += 2; | |
476 | ||
477 | switch (opc) { | |
478 | case INDEX_op_end: | |
479 | case INDEX_op_nop: | |
480 | break; | |
481 | case INDEX_op_nop1: | |
482 | case INDEX_op_nop2: | |
483 | case INDEX_op_nop3: | |
484 | case INDEX_op_nopn: | |
485 | case INDEX_op_discard: | |
486 | TODO(); | |
487 | break; | |
488 | case INDEX_op_set_label: | |
489 | TODO(); | |
490 | break; | |
491 | case INDEX_op_call: | |
492 | t0 = tci_read_ri(&tb_ptr); | |
493 | #if TCG_TARGET_REG_BITS == 32 | |
494 | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), | |
495 | tci_read_reg(TCG_REG_R1), | |
496 | tci_read_reg(TCG_REG_R2), | |
497 | tci_read_reg(TCG_REG_R3), | |
498 | tci_read_reg(TCG_REG_R5), | |
499 | tci_read_reg(TCG_REG_R6), | |
500 | tci_read_reg(TCG_REG_R7), | |
6673f47d SW |
501 | tci_read_reg(TCG_REG_R8), |
502 | tci_read_reg(TCG_REG_R9), | |
503 | tci_read_reg(TCG_REG_R10)); | |
7657f4bf SW |
504 | tci_write_reg(TCG_REG_R0, tmp64); |
505 | tci_write_reg(TCG_REG_R1, tmp64 >> 32); | |
506 | #else | |
507 | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), | |
508 | tci_read_reg(TCG_REG_R1), | |
509 | tci_read_reg(TCG_REG_R2), | |
6673f47d SW |
510 | tci_read_reg(TCG_REG_R3), |
511 | tci_read_reg(TCG_REG_R5)); | |
7657f4bf SW |
512 | tci_write_reg(TCG_REG_R0, tmp64); |
513 | #endif | |
514 | break; | |
7657f4bf SW |
515 | case INDEX_op_br: |
516 | label = tci_read_label(&tb_ptr); | |
517 | assert(tb_ptr == old_code_ptr + op_size); | |
518 | tb_ptr = (uint8_t *)label; | |
519 | continue; | |
520 | case INDEX_op_setcond_i32: | |
521 | t0 = *tb_ptr++; | |
522 | t1 = tci_read_r32(&tb_ptr); | |
523 | t2 = tci_read_ri32(&tb_ptr); | |
524 | condition = *tb_ptr++; | |
525 | tci_write_reg32(t0, tci_compare32(t1, t2, condition)); | |
526 | break; | |
527 | #if TCG_TARGET_REG_BITS == 32 | |
528 | case INDEX_op_setcond2_i32: | |
529 | t0 = *tb_ptr++; | |
530 | tmp64 = tci_read_r64(&tb_ptr); | |
531 | v64 = tci_read_ri64(&tb_ptr); | |
532 | condition = *tb_ptr++; | |
533 | tci_write_reg32(t0, tci_compare64(tmp64, v64, condition)); | |
534 | break; | |
535 | #elif TCG_TARGET_REG_BITS == 64 | |
536 | case INDEX_op_setcond_i64: | |
537 | t0 = *tb_ptr++; | |
538 | t1 = tci_read_r64(&tb_ptr); | |
539 | t2 = tci_read_ri64(&tb_ptr); | |
540 | condition = *tb_ptr++; | |
541 | tci_write_reg64(t0, tci_compare64(t1, t2, condition)); | |
542 | break; | |
543 | #endif | |
544 | case INDEX_op_mov_i32: | |
545 | t0 = *tb_ptr++; | |
546 | t1 = tci_read_r32(&tb_ptr); | |
547 | tci_write_reg32(t0, t1); | |
548 | break; | |
549 | case INDEX_op_movi_i32: | |
550 | t0 = *tb_ptr++; | |
551 | t1 = tci_read_i32(&tb_ptr); | |
552 | tci_write_reg32(t0, t1); | |
553 | break; | |
554 | ||
555 | /* Load/store operations (32 bit). */ | |
556 | ||
557 | case INDEX_op_ld8u_i32: | |
558 | t0 = *tb_ptr++; | |
559 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 560 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
561 | tci_write_reg8(t0, *(uint8_t *)(t1 + t2)); |
562 | break; | |
563 | case INDEX_op_ld8s_i32: | |
564 | case INDEX_op_ld16u_i32: | |
565 | TODO(); | |
566 | break; | |
567 | case INDEX_op_ld16s_i32: | |
568 | TODO(); | |
569 | break; | |
570 | case INDEX_op_ld_i32: | |
571 | t0 = *tb_ptr++; | |
572 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 573 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
574 | tci_write_reg32(t0, *(uint32_t *)(t1 + t2)); |
575 | break; | |
576 | case INDEX_op_st8_i32: | |
577 | t0 = tci_read_r8(&tb_ptr); | |
578 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 579 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
580 | *(uint8_t *)(t1 + t2) = t0; |
581 | break; | |
582 | case INDEX_op_st16_i32: | |
583 | t0 = tci_read_r16(&tb_ptr); | |
584 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 585 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
586 | *(uint16_t *)(t1 + t2) = t0; |
587 | break; | |
588 | case INDEX_op_st_i32: | |
589 | t0 = tci_read_r32(&tb_ptr); | |
590 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 591 | t2 = tci_read_s32(&tb_ptr); |
ee79c356 | 592 | assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
593 | *(uint32_t *)(t1 + t2) = t0; |
594 | break; | |
595 | ||
596 | /* Arithmetic operations (32 bit). */ | |
597 | ||
598 | case INDEX_op_add_i32: | |
599 | t0 = *tb_ptr++; | |
600 | t1 = tci_read_ri32(&tb_ptr); | |
601 | t2 = tci_read_ri32(&tb_ptr); | |
602 | tci_write_reg32(t0, t1 + t2); | |
603 | break; | |
604 | case INDEX_op_sub_i32: | |
605 | t0 = *tb_ptr++; | |
606 | t1 = tci_read_ri32(&tb_ptr); | |
607 | t2 = tci_read_ri32(&tb_ptr); | |
608 | tci_write_reg32(t0, t1 - t2); | |
609 | break; | |
610 | case INDEX_op_mul_i32: | |
611 | t0 = *tb_ptr++; | |
612 | t1 = tci_read_ri32(&tb_ptr); | |
613 | t2 = tci_read_ri32(&tb_ptr); | |
614 | tci_write_reg32(t0, t1 * t2); | |
615 | break; | |
616 | #if TCG_TARGET_HAS_div_i32 | |
617 | case INDEX_op_div_i32: | |
618 | t0 = *tb_ptr++; | |
619 | t1 = tci_read_ri32(&tb_ptr); | |
620 | t2 = tci_read_ri32(&tb_ptr); | |
621 | tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2); | |
622 | break; | |
623 | case INDEX_op_divu_i32: | |
624 | t0 = *tb_ptr++; | |
625 | t1 = tci_read_ri32(&tb_ptr); | |
626 | t2 = tci_read_ri32(&tb_ptr); | |
627 | tci_write_reg32(t0, t1 / t2); | |
628 | break; | |
629 | case INDEX_op_rem_i32: | |
630 | t0 = *tb_ptr++; | |
631 | t1 = tci_read_ri32(&tb_ptr); | |
632 | t2 = tci_read_ri32(&tb_ptr); | |
633 | tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2); | |
634 | break; | |
635 | case INDEX_op_remu_i32: | |
636 | t0 = *tb_ptr++; | |
637 | t1 = tci_read_ri32(&tb_ptr); | |
638 | t2 = tci_read_ri32(&tb_ptr); | |
639 | tci_write_reg32(t0, t1 % t2); | |
640 | break; | |
641 | #elif TCG_TARGET_HAS_div2_i32 | |
642 | case INDEX_op_div2_i32: | |
643 | case INDEX_op_divu2_i32: | |
644 | TODO(); | |
645 | break; | |
646 | #endif | |
647 | case INDEX_op_and_i32: | |
648 | t0 = *tb_ptr++; | |
649 | t1 = tci_read_ri32(&tb_ptr); | |
650 | t2 = tci_read_ri32(&tb_ptr); | |
651 | tci_write_reg32(t0, t1 & t2); | |
652 | break; | |
653 | case INDEX_op_or_i32: | |
654 | t0 = *tb_ptr++; | |
655 | t1 = tci_read_ri32(&tb_ptr); | |
656 | t2 = tci_read_ri32(&tb_ptr); | |
657 | tci_write_reg32(t0, t1 | t2); | |
658 | break; | |
659 | case INDEX_op_xor_i32: | |
660 | t0 = *tb_ptr++; | |
661 | t1 = tci_read_ri32(&tb_ptr); | |
662 | t2 = tci_read_ri32(&tb_ptr); | |
663 | tci_write_reg32(t0, t1 ^ t2); | |
664 | break; | |
665 | ||
666 | /* Shift/rotate operations (32 bit). */ | |
667 | ||
668 | case INDEX_op_shl_i32: | |
669 | t0 = *tb_ptr++; | |
670 | t1 = tci_read_ri32(&tb_ptr); | |
671 | t2 = tci_read_ri32(&tb_ptr); | |
672 | tci_write_reg32(t0, t1 << t2); | |
673 | break; | |
674 | case INDEX_op_shr_i32: | |
675 | t0 = *tb_ptr++; | |
676 | t1 = tci_read_ri32(&tb_ptr); | |
677 | t2 = tci_read_ri32(&tb_ptr); | |
678 | tci_write_reg32(t0, t1 >> t2); | |
679 | break; | |
680 | case INDEX_op_sar_i32: | |
681 | t0 = *tb_ptr++; | |
682 | t1 = tci_read_ri32(&tb_ptr); | |
683 | t2 = tci_read_ri32(&tb_ptr); | |
684 | tci_write_reg32(t0, ((int32_t)t1 >> t2)); | |
685 | break; | |
686 | #if TCG_TARGET_HAS_rot_i32 | |
687 | case INDEX_op_rotl_i32: | |
688 | t0 = *tb_ptr++; | |
689 | t1 = tci_read_ri32(&tb_ptr); | |
690 | t2 = tci_read_ri32(&tb_ptr); | |
691 | tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2))); | |
692 | break; | |
693 | case INDEX_op_rotr_i32: | |
694 | t0 = *tb_ptr++; | |
695 | t1 = tci_read_ri32(&tb_ptr); | |
696 | t2 = tci_read_ri32(&tb_ptr); | |
697 | tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2))); | |
698 | break; | |
e24dc9fe SW |
699 | #endif |
700 | #if TCG_TARGET_HAS_deposit_i32 | |
701 | case INDEX_op_deposit_i32: | |
702 | t0 = *tb_ptr++; | |
703 | t1 = tci_read_r32(&tb_ptr); | |
704 | t2 = tci_read_r32(&tb_ptr); | |
705 | tmp16 = *tb_ptr++; | |
706 | tmp8 = *tb_ptr++; | |
707 | tmp32 = (((1 << tmp8) - 1) << tmp16); | |
708 | tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32)); | |
709 | break; | |
7657f4bf SW |
710 | #endif |
711 | case INDEX_op_brcond_i32: | |
712 | t0 = tci_read_r32(&tb_ptr); | |
713 | t1 = tci_read_ri32(&tb_ptr); | |
714 | condition = *tb_ptr++; | |
715 | label = tci_read_label(&tb_ptr); | |
716 | if (tci_compare32(t0, t1, condition)) { | |
717 | assert(tb_ptr == old_code_ptr + op_size); | |
718 | tb_ptr = (uint8_t *)label; | |
719 | continue; | |
720 | } | |
721 | break; | |
722 | #if TCG_TARGET_REG_BITS == 32 | |
723 | case INDEX_op_add2_i32: | |
724 | t0 = *tb_ptr++; | |
725 | t1 = *tb_ptr++; | |
726 | tmp64 = tci_read_r64(&tb_ptr); | |
727 | tmp64 += tci_read_r64(&tb_ptr); | |
728 | tci_write_reg64(t1, t0, tmp64); | |
729 | break; | |
730 | case INDEX_op_sub2_i32: | |
731 | t0 = *tb_ptr++; | |
732 | t1 = *tb_ptr++; | |
733 | tmp64 = tci_read_r64(&tb_ptr); | |
734 | tmp64 -= tci_read_r64(&tb_ptr); | |
735 | tci_write_reg64(t1, t0, tmp64); | |
736 | break; | |
737 | case INDEX_op_brcond2_i32: | |
738 | tmp64 = tci_read_r64(&tb_ptr); | |
739 | v64 = tci_read_ri64(&tb_ptr); | |
740 | condition = *tb_ptr++; | |
741 | label = tci_read_label(&tb_ptr); | |
742 | if (tci_compare64(tmp64, v64, condition)) { | |
743 | assert(tb_ptr == old_code_ptr + op_size); | |
744 | tb_ptr = (uint8_t *)label; | |
745 | continue; | |
746 | } | |
747 | break; | |
748 | case INDEX_op_mulu2_i32: | |
749 | t0 = *tb_ptr++; | |
750 | t1 = *tb_ptr++; | |
751 | t2 = tci_read_r32(&tb_ptr); | |
752 | tmp64 = tci_read_r32(&tb_ptr); | |
753 | tci_write_reg64(t1, t0, t2 * tmp64); | |
754 | break; | |
755 | #endif /* TCG_TARGET_REG_BITS == 32 */ | |
756 | #if TCG_TARGET_HAS_ext8s_i32 | |
757 | case INDEX_op_ext8s_i32: | |
758 | t0 = *tb_ptr++; | |
759 | t1 = tci_read_r8s(&tb_ptr); | |
760 | tci_write_reg32(t0, t1); | |
761 | break; | |
762 | #endif | |
763 | #if TCG_TARGET_HAS_ext16s_i32 | |
764 | case INDEX_op_ext16s_i32: | |
765 | t0 = *tb_ptr++; | |
766 | t1 = tci_read_r16s(&tb_ptr); | |
767 | tci_write_reg32(t0, t1); | |
768 | break; | |
769 | #endif | |
770 | #if TCG_TARGET_HAS_ext8u_i32 | |
771 | case INDEX_op_ext8u_i32: | |
772 | t0 = *tb_ptr++; | |
773 | t1 = tci_read_r8(&tb_ptr); | |
774 | tci_write_reg32(t0, t1); | |
775 | break; | |
776 | #endif | |
777 | #if TCG_TARGET_HAS_ext16u_i32 | |
778 | case INDEX_op_ext16u_i32: | |
779 | t0 = *tb_ptr++; | |
780 | t1 = tci_read_r16(&tb_ptr); | |
781 | tci_write_reg32(t0, t1); | |
782 | break; | |
783 | #endif | |
784 | #if TCG_TARGET_HAS_bswap16_i32 | |
785 | case INDEX_op_bswap16_i32: | |
786 | t0 = *tb_ptr++; | |
787 | t1 = tci_read_r16(&tb_ptr); | |
788 | tci_write_reg32(t0, bswap16(t1)); | |
789 | break; | |
790 | #endif | |
791 | #if TCG_TARGET_HAS_bswap32_i32 | |
792 | case INDEX_op_bswap32_i32: | |
793 | t0 = *tb_ptr++; | |
794 | t1 = tci_read_r32(&tb_ptr); | |
795 | tci_write_reg32(t0, bswap32(t1)); | |
796 | break; | |
797 | #endif | |
798 | #if TCG_TARGET_HAS_not_i32 | |
799 | case INDEX_op_not_i32: | |
800 | t0 = *tb_ptr++; | |
801 | t1 = tci_read_r32(&tb_ptr); | |
802 | tci_write_reg32(t0, ~t1); | |
803 | break; | |
804 | #endif | |
805 | #if TCG_TARGET_HAS_neg_i32 | |
806 | case INDEX_op_neg_i32: | |
807 | t0 = *tb_ptr++; | |
808 | t1 = tci_read_r32(&tb_ptr); | |
809 | tci_write_reg32(t0, -t1); | |
810 | break; | |
811 | #endif | |
812 | #if TCG_TARGET_REG_BITS == 64 | |
813 | case INDEX_op_mov_i64: | |
814 | t0 = *tb_ptr++; | |
815 | t1 = tci_read_r64(&tb_ptr); | |
816 | tci_write_reg64(t0, t1); | |
817 | break; | |
818 | case INDEX_op_movi_i64: | |
819 | t0 = *tb_ptr++; | |
820 | t1 = tci_read_i64(&tb_ptr); | |
821 | tci_write_reg64(t0, t1); | |
822 | break; | |
823 | ||
824 | /* Load/store operations (64 bit). */ | |
825 | ||
826 | case INDEX_op_ld8u_i64: | |
827 | t0 = *tb_ptr++; | |
828 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 829 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
830 | tci_write_reg8(t0, *(uint8_t *)(t1 + t2)); |
831 | break; | |
832 | case INDEX_op_ld8s_i64: | |
833 | case INDEX_op_ld16u_i64: | |
834 | case INDEX_op_ld16s_i64: | |
835 | TODO(); | |
836 | break; | |
837 | case INDEX_op_ld32u_i64: | |
838 | t0 = *tb_ptr++; | |
839 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 840 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
841 | tci_write_reg32(t0, *(uint32_t *)(t1 + t2)); |
842 | break; | |
843 | case INDEX_op_ld32s_i64: | |
844 | t0 = *tb_ptr++; | |
845 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 846 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
847 | tci_write_reg32s(t0, *(int32_t *)(t1 + t2)); |
848 | break; | |
849 | case INDEX_op_ld_i64: | |
850 | t0 = *tb_ptr++; | |
851 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 852 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
853 | tci_write_reg64(t0, *(uint64_t *)(t1 + t2)); |
854 | break; | |
855 | case INDEX_op_st8_i64: | |
856 | t0 = tci_read_r8(&tb_ptr); | |
857 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 858 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
859 | *(uint8_t *)(t1 + t2) = t0; |
860 | break; | |
861 | case INDEX_op_st16_i64: | |
862 | t0 = tci_read_r16(&tb_ptr); | |
863 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 864 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
865 | *(uint16_t *)(t1 + t2) = t0; |
866 | break; | |
867 | case INDEX_op_st32_i64: | |
868 | t0 = tci_read_r32(&tb_ptr); | |
869 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 870 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
871 | *(uint32_t *)(t1 + t2) = t0; |
872 | break; | |
873 | case INDEX_op_st_i64: | |
874 | t0 = tci_read_r64(&tb_ptr); | |
875 | t1 = tci_read_r(&tb_ptr); | |
03fc0548 | 876 | t2 = tci_read_s32(&tb_ptr); |
ee79c356 | 877 | assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
878 | *(uint64_t *)(t1 + t2) = t0; |
879 | break; | |
880 | ||
881 | /* Arithmetic operations (64 bit). */ | |
882 | ||
883 | case INDEX_op_add_i64: | |
884 | t0 = *tb_ptr++; | |
885 | t1 = tci_read_ri64(&tb_ptr); | |
886 | t2 = tci_read_ri64(&tb_ptr); | |
887 | tci_write_reg64(t0, t1 + t2); | |
888 | break; | |
889 | case INDEX_op_sub_i64: | |
890 | t0 = *tb_ptr++; | |
891 | t1 = tci_read_ri64(&tb_ptr); | |
892 | t2 = tci_read_ri64(&tb_ptr); | |
893 | tci_write_reg64(t0, t1 - t2); | |
894 | break; | |
895 | case INDEX_op_mul_i64: | |
896 | t0 = *tb_ptr++; | |
897 | t1 = tci_read_ri64(&tb_ptr); | |
898 | t2 = tci_read_ri64(&tb_ptr); | |
899 | tci_write_reg64(t0, t1 * t2); | |
900 | break; | |
901 | #if TCG_TARGET_HAS_div_i64 | |
902 | case INDEX_op_div_i64: | |
903 | case INDEX_op_divu_i64: | |
904 | case INDEX_op_rem_i64: | |
905 | case INDEX_op_remu_i64: | |
906 | TODO(); | |
907 | break; | |
908 | #elif TCG_TARGET_HAS_div2_i64 | |
909 | case INDEX_op_div2_i64: | |
910 | case INDEX_op_divu2_i64: | |
911 | TODO(); | |
912 | break; | |
913 | #endif | |
914 | case INDEX_op_and_i64: | |
915 | t0 = *tb_ptr++; | |
916 | t1 = tci_read_ri64(&tb_ptr); | |
917 | t2 = tci_read_ri64(&tb_ptr); | |
918 | tci_write_reg64(t0, t1 & t2); | |
919 | break; | |
920 | case INDEX_op_or_i64: | |
921 | t0 = *tb_ptr++; | |
922 | t1 = tci_read_ri64(&tb_ptr); | |
923 | t2 = tci_read_ri64(&tb_ptr); | |
924 | tci_write_reg64(t0, t1 | t2); | |
925 | break; | |
926 | case INDEX_op_xor_i64: | |
927 | t0 = *tb_ptr++; | |
928 | t1 = tci_read_ri64(&tb_ptr); | |
929 | t2 = tci_read_ri64(&tb_ptr); | |
930 | tci_write_reg64(t0, t1 ^ t2); | |
931 | break; | |
932 | ||
933 | /* Shift/rotate operations (64 bit). */ | |
934 | ||
935 | case INDEX_op_shl_i64: | |
936 | t0 = *tb_ptr++; | |
937 | t1 = tci_read_ri64(&tb_ptr); | |
938 | t2 = tci_read_ri64(&tb_ptr); | |
939 | tci_write_reg64(t0, t1 << t2); | |
940 | break; | |
941 | case INDEX_op_shr_i64: | |
942 | t0 = *tb_ptr++; | |
943 | t1 = tci_read_ri64(&tb_ptr); | |
944 | t2 = tci_read_ri64(&tb_ptr); | |
945 | tci_write_reg64(t0, t1 >> t2); | |
946 | break; | |
947 | case INDEX_op_sar_i64: | |
948 | t0 = *tb_ptr++; | |
949 | t1 = tci_read_ri64(&tb_ptr); | |
950 | t2 = tci_read_ri64(&tb_ptr); | |
951 | tci_write_reg64(t0, ((int64_t)t1 >> t2)); | |
952 | break; | |
953 | #if TCG_TARGET_HAS_rot_i64 | |
954 | case INDEX_op_rotl_i64: | |
955 | case INDEX_op_rotr_i64: | |
956 | TODO(); | |
957 | break; | |
e24dc9fe SW |
958 | #endif |
959 | #if TCG_TARGET_HAS_deposit_i64 | |
960 | case INDEX_op_deposit_i64: | |
961 | t0 = *tb_ptr++; | |
962 | t1 = tci_read_r64(&tb_ptr); | |
963 | t2 = tci_read_r64(&tb_ptr); | |
964 | tmp16 = *tb_ptr++; | |
965 | tmp8 = *tb_ptr++; | |
966 | tmp64 = (((1ULL << tmp8) - 1) << tmp16); | |
967 | tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64)); | |
968 | break; | |
7657f4bf SW |
969 | #endif |
970 | case INDEX_op_brcond_i64: | |
971 | t0 = tci_read_r64(&tb_ptr); | |
972 | t1 = tci_read_ri64(&tb_ptr); | |
973 | condition = *tb_ptr++; | |
974 | label = tci_read_label(&tb_ptr); | |
975 | if (tci_compare64(t0, t1, condition)) { | |
976 | assert(tb_ptr == old_code_ptr + op_size); | |
977 | tb_ptr = (uint8_t *)label; | |
978 | continue; | |
979 | } | |
980 | break; | |
981 | #if TCG_TARGET_HAS_ext8u_i64 | |
982 | case INDEX_op_ext8u_i64: | |
983 | t0 = *tb_ptr++; | |
984 | t1 = tci_read_r8(&tb_ptr); | |
985 | tci_write_reg64(t0, t1); | |
986 | break; | |
987 | #endif | |
988 | #if TCG_TARGET_HAS_ext8s_i64 | |
989 | case INDEX_op_ext8s_i64: | |
990 | t0 = *tb_ptr++; | |
991 | t1 = tci_read_r8s(&tb_ptr); | |
992 | tci_write_reg64(t0, t1); | |
993 | break; | |
994 | #endif | |
995 | #if TCG_TARGET_HAS_ext16s_i64 | |
996 | case INDEX_op_ext16s_i64: | |
997 | t0 = *tb_ptr++; | |
998 | t1 = tci_read_r16s(&tb_ptr); | |
999 | tci_write_reg64(t0, t1); | |
1000 | break; | |
1001 | #endif | |
1002 | #if TCG_TARGET_HAS_ext16u_i64 | |
1003 | case INDEX_op_ext16u_i64: | |
1004 | t0 = *tb_ptr++; | |
1005 | t1 = tci_read_r16(&tb_ptr); | |
1006 | tci_write_reg64(t0, t1); | |
1007 | break; | |
1008 | #endif | |
1009 | #if TCG_TARGET_HAS_ext32s_i64 | |
1010 | case INDEX_op_ext32s_i64: | |
1011 | t0 = *tb_ptr++; | |
1012 | t1 = tci_read_r32s(&tb_ptr); | |
1013 | tci_write_reg64(t0, t1); | |
1014 | break; | |
1015 | #endif | |
1016 | #if TCG_TARGET_HAS_ext32u_i64 | |
1017 | case INDEX_op_ext32u_i64: | |
1018 | t0 = *tb_ptr++; | |
1019 | t1 = tci_read_r32(&tb_ptr); | |
1020 | tci_write_reg64(t0, t1); | |
1021 | break; | |
1022 | #endif | |
1023 | #if TCG_TARGET_HAS_bswap16_i64 | |
1024 | case INDEX_op_bswap16_i64: | |
1025 | TODO(); | |
1026 | t0 = *tb_ptr++; | |
1027 | t1 = tci_read_r16(&tb_ptr); | |
1028 | tci_write_reg64(t0, bswap16(t1)); | |
1029 | break; | |
1030 | #endif | |
1031 | #if TCG_TARGET_HAS_bswap32_i64 | |
1032 | case INDEX_op_bswap32_i64: | |
1033 | t0 = *tb_ptr++; | |
1034 | t1 = tci_read_r32(&tb_ptr); | |
1035 | tci_write_reg64(t0, bswap32(t1)); | |
1036 | break; | |
1037 | #endif | |
1038 | #if TCG_TARGET_HAS_bswap64_i64 | |
1039 | case INDEX_op_bswap64_i64: | |
7657f4bf SW |
1040 | t0 = *tb_ptr++; |
1041 | t1 = tci_read_r64(&tb_ptr); | |
1042 | tci_write_reg64(t0, bswap64(t1)); | |
1043 | break; | |
1044 | #endif | |
1045 | #if TCG_TARGET_HAS_not_i64 | |
1046 | case INDEX_op_not_i64: | |
1047 | t0 = *tb_ptr++; | |
1048 | t1 = tci_read_r64(&tb_ptr); | |
1049 | tci_write_reg64(t0, ~t1); | |
1050 | break; | |
1051 | #endif | |
1052 | #if TCG_TARGET_HAS_neg_i64 | |
1053 | case INDEX_op_neg_i64: | |
1054 | t0 = *tb_ptr++; | |
1055 | t1 = tci_read_r64(&tb_ptr); | |
1056 | tci_write_reg64(t0, -t1); | |
1057 | break; | |
1058 | #endif | |
1059 | #endif /* TCG_TARGET_REG_BITS == 64 */ | |
1060 | ||
1061 | /* QEMU specific operations. */ | |
1062 | ||
1063 | #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS | |
1064 | case INDEX_op_debug_insn_start: | |
1065 | TODO(); | |
1066 | break; | |
1067 | #else | |
1068 | case INDEX_op_debug_insn_start: | |
1069 | TODO(); | |
1070 | break; | |
1071 | #endif | |
1072 | case INDEX_op_exit_tb: | |
1073 | next_tb = *(uint64_t *)tb_ptr; | |
1074 | goto exit; | |
1075 | break; | |
1076 | case INDEX_op_goto_tb: | |
1077 | t0 = tci_read_i32(&tb_ptr); | |
1078 | assert(tb_ptr == old_code_ptr + op_size); | |
1079 | tb_ptr += (int32_t)t0; | |
1080 | continue; | |
1081 | case INDEX_op_qemu_ld8u: | |
1082 | t0 = *tb_ptr++; | |
1083 | taddr = tci_read_ulong(&tb_ptr); | |
1084 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1085 | tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1086 | #else |
1087 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1088 | tmp8 = *(uint8_t *)(host_addr + GUEST_BASE); |
1089 | #endif | |
1090 | tci_write_reg8(t0, tmp8); | |
1091 | break; | |
1092 | case INDEX_op_qemu_ld8s: | |
1093 | t0 = *tb_ptr++; | |
1094 | taddr = tci_read_ulong(&tb_ptr); | |
1095 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1096 | tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1097 | #else |
1098 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1099 | tmp8 = *(uint8_t *)(host_addr + GUEST_BASE); |
1100 | #endif | |
1101 | tci_write_reg8s(t0, tmp8); | |
1102 | break; | |
1103 | case INDEX_op_qemu_ld16u: | |
1104 | t0 = *tb_ptr++; | |
1105 | taddr = tci_read_ulong(&tb_ptr); | |
1106 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1107 | tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1108 | #else |
1109 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1110 | tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE)); |
1111 | #endif | |
1112 | tci_write_reg16(t0, tmp16); | |
1113 | break; | |
1114 | case INDEX_op_qemu_ld16s: | |
1115 | t0 = *tb_ptr++; | |
1116 | taddr = tci_read_ulong(&tb_ptr); | |
1117 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1118 | tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1119 | #else |
1120 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1121 | tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE)); |
1122 | #endif | |
1123 | tci_write_reg16s(t0, tmp16); | |
1124 | break; | |
1125 | #if TCG_TARGET_REG_BITS == 64 | |
1126 | case INDEX_op_qemu_ld32u: | |
1127 | t0 = *tb_ptr++; | |
1128 | taddr = tci_read_ulong(&tb_ptr); | |
1129 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1130 | tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1131 | #else |
1132 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1133 | tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE)); |
1134 | #endif | |
1135 | tci_write_reg32(t0, tmp32); | |
1136 | break; | |
1137 | case INDEX_op_qemu_ld32s: | |
1138 | t0 = *tb_ptr++; | |
1139 | taddr = tci_read_ulong(&tb_ptr); | |
1140 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1141 | tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1142 | #else |
1143 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1144 | tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE)); |
1145 | #endif | |
1146 | tci_write_reg32s(t0, tmp32); | |
1147 | break; | |
1148 | #endif /* TCG_TARGET_REG_BITS == 64 */ | |
1149 | case INDEX_op_qemu_ld32: | |
1150 | t0 = *tb_ptr++; | |
1151 | taddr = tci_read_ulong(&tb_ptr); | |
1152 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1153 | tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1154 | #else |
1155 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1156 | tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE)); |
1157 | #endif | |
1158 | tci_write_reg32(t0, tmp32); | |
1159 | break; | |
1160 | case INDEX_op_qemu_ld64: | |
1161 | t0 = *tb_ptr++; | |
1162 | #if TCG_TARGET_REG_BITS == 32 | |
1163 | t1 = *tb_ptr++; | |
1164 | #endif | |
1165 | taddr = tci_read_ulong(&tb_ptr); | |
1166 | #ifdef CONFIG_SOFTMMU | |
3b2aba2f | 1167 | tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr)); |
7657f4bf SW |
1168 | #else |
1169 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1170 | tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE)); |
1171 | #endif | |
1172 | tci_write_reg(t0, tmp64); | |
1173 | #if TCG_TARGET_REG_BITS == 32 | |
1174 | tci_write_reg(t1, tmp64 >> 32); | |
1175 | #endif | |
1176 | break; | |
1177 | case INDEX_op_qemu_st8: | |
1178 | t0 = tci_read_r8(&tb_ptr); | |
1179 | taddr = tci_read_ulong(&tb_ptr); | |
1180 | #ifdef CONFIG_SOFTMMU | |
1181 | t2 = tci_read_i(&tb_ptr); | |
3b2aba2f | 1182 | helper_stb_mmu(env, taddr, t0, t2); |
7657f4bf SW |
1183 | #else |
1184 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1185 | *(uint8_t *)(host_addr + GUEST_BASE) = t0; |
1186 | #endif | |
1187 | break; | |
1188 | case INDEX_op_qemu_st16: | |
1189 | t0 = tci_read_r16(&tb_ptr); | |
1190 | taddr = tci_read_ulong(&tb_ptr); | |
1191 | #ifdef CONFIG_SOFTMMU | |
1192 | t2 = tci_read_i(&tb_ptr); | |
3b2aba2f | 1193 | helper_stw_mmu(env, taddr, t0, t2); |
7657f4bf SW |
1194 | #else |
1195 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1196 | *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0); |
1197 | #endif | |
1198 | break; | |
1199 | case INDEX_op_qemu_st32: | |
1200 | t0 = tci_read_r32(&tb_ptr); | |
1201 | taddr = tci_read_ulong(&tb_ptr); | |
1202 | #ifdef CONFIG_SOFTMMU | |
1203 | t2 = tci_read_i(&tb_ptr); | |
3b2aba2f | 1204 | helper_stl_mmu(env, taddr, t0, t2); |
7657f4bf SW |
1205 | #else |
1206 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1207 | *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0); |
1208 | #endif | |
1209 | break; | |
1210 | case INDEX_op_qemu_st64: | |
1211 | tmp64 = tci_read_r64(&tb_ptr); | |
1212 | taddr = tci_read_ulong(&tb_ptr); | |
1213 | #ifdef CONFIG_SOFTMMU | |
1214 | t2 = tci_read_i(&tb_ptr); | |
3b2aba2f | 1215 | helper_stq_mmu(env, taddr, tmp64, t2); |
7657f4bf SW |
1216 | #else |
1217 | host_addr = (tcg_target_ulong)taddr; | |
7657f4bf SW |
1218 | *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64); |
1219 | #endif | |
1220 | break; | |
1221 | default: | |
1222 | TODO(); | |
1223 | break; | |
1224 | } | |
1225 | assert(tb_ptr == old_code_ptr + op_size); | |
1226 | } | |
1227 | exit: | |
1228 | return next_tb; | |
1229 | } |