]>
Commit | Line | Data |
---|---|---|
7657f4bf SW |
1 | /* |
2 | * Tiny Code Interpreter for QEMU | |
3 | * | |
3ccdbecf | 4 | * Copyright (c) 2009, 2011, 2016 Stefan Weil |
7657f4bf SW |
5 | * |
6 | * This program is free software: you can redistribute it and/or modify | |
7 | * it under the terms of the GNU General Public License as published by | |
8 | * the Free Software Foundation, either version 2 of the License, or | |
9 | * (at your option) any later version. | |
10 | * | |
11 | * This program is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | * GNU General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU General Public License | |
17 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
18 | */ | |
19 | ||
d38ea87a | 20 | #include "qemu/osdep.h" |
7657f4bf | 21 | |
/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 * tci_assert() is used instead of plain assert() so interpreter-internal
 * consistency checks can be compiled out independently of other asserts. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif
29 | ||
30 | #include "qemu-common.h" | |
65603e2f | 31 | #include "tcg/tcg.h" /* MAX_OPC_PARAM_IARGS */ |
f08b6170 | 32 | #include "exec/cpu_ldst.h" |
dcb32f1d | 33 | #include "tcg/tcg-op.h" |
c905a368 | 34 | #include "qemu/compiler.h" |
7657f4bf SW |
35 | |
/* Marker for missing code: report file/line/function, then abort.
 * Reaching one of these means the bytecode used an opcode the
 * interpreter does not implement yet. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)
43 | ||
/* Generic signature used to invoke TCG helper functions from bytecode.
 * Up to MAX_OPC_PARAM_IARGS (6) input arguments are supported; on 32-bit
 * hosts each 64-bit argument occupies two registers, hence twelve
 * parameters there.  The #error trips if that limit ever changes. */
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
59 | ||
/* Per-thread pointer into the current translation block's bytecode;
 * stored before helper calls (see INDEX_op_call and the softmmu
 * load/store macros) so helpers can identify the guest instruction. */
__thread uintptr_t tci_tb_ptr;
61 | ||
/* Return the value of interpreter register 'index' (native width). */
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}
67 | ||
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read register 'index', truncated and sign-extended from 8 bits. */
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif
74 | ||
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read register 'index', truncated and sign-extended from 16 bits. */
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif
81 | ||
#if TCG_TARGET_REG_BITS == 64
/* Read register 'index', truncated and sign-extended from 32 bits. */
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif
88 | ||
/* Read register 'index', truncated to its low 8 bits (zero-extended). */
static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}
93 | ||
/* Read register 'index', truncated to its low 16 bits (zero-extended). */
static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}
98 | ||
/* Read register 'index', truncated to its low 32 bits (zero-extended). */
static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}
103 | ||
#if TCG_TARGET_REG_BITS == 64
/* Read register 'index' as a full 64-bit value (64-bit hosts only). */
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif
110 | ||
/* Store 'value' into interpreter register 'index'.
 * The env pointer (TCG_AREG0) and the stack pointer are never valid
 * destinations for generated code, hence the extra assertions. */
static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
119 | ||
#if TCG_TARGET_REG_BITS == 64
/* Store a 32-bit value, sign-extended to register width. */
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif
127 | ||
/* Store an 8-bit value, zero-extended to register width. */
static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}
132 | ||
#if TCG_TARGET_REG_BITS == 64
/* Store a 16-bit value, zero-extended to register width. */
static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}
#endif
2f160e0f | 140 | |
/* Store a 32-bit value, zero-extended to register width. */
static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}
146 | ||
#if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit value as a register pair: low half in 'low_index',
 * high half in 'high_index'. */
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
/* Store a full 64-bit value into a single register. */
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif
161 | ||
#if TCG_TARGET_REG_BITS == 32
/* Combine two 32-bit halves into a single 64-bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t result = high;

    result <<= 32;
    result |= low;
    return result;
}
#endif
169 | ||
170 | /* Read constant (native size) from bytecode. */ | |
305daaed | 171 | static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr) |
7657f4bf | 172 | { |
305daaed | 173 | tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr); |
7657f4bf SW |
174 | *tb_ptr += sizeof(value); |
175 | return value; | |
176 | } | |
177 | ||
/* Read unsigned constant (32 bit) from bytecode and advance *tb_ptr.
 * Copy with memcpy: the stream is byte-aligned, so a direct
 * *(const uint32_t *) dereference would be a misaligned access and a
 * strict-aliasing violation on some hosts.  memcpy is in scope via
 * qemu/osdep.h. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
185 | ||
/* Read signed constant (32 bit) from bytecode and advance *tb_ptr.
 * Copy with memcpy: the stream is byte-aligned, so a direct
 * *(const int32_t *) dereference would be a misaligned access and a
 * strict-aliasing violation on some hosts.  memcpy is in scope via
 * qemu/osdep.h. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
193 | ||
#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode and advance *tb_ptr.
 * Copy with memcpy: the stream is byte-aligned, so a direct
 * *(const uint64_t *) dereference would be a misaligned access and a
 * strict-aliasing violation on some hosts.  memcpy is in scope via
 * qemu/osdep.h. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
#endif
203 | ||
/* Read indexed register (native size) from bytecode: consume one byte
 * (the register number) and return that register's value. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
212 | ||
/* Read indexed register (8 bit) from bytecode: consume one register
 * number byte, return the register's low 8 bits. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
220 | ||
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode: consume one
 * register number byte, return the register's low 8 bits sign-extended. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
230 | ||
/* Read indexed register (16 bit) from bytecode: consume one register
 * number byte, return the register's low 16 bits. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
239 | ||
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode: consume one
 * register number byte, return the register's low 16 bits sign-extended. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
250 | ||
/* Read indexed register (32 bit) from bytecode: consume one register
 * number byte, return the register's low 32 bits. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
259 | ||
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode.
 * The low half is encoded first; it must be read into a temporary so
 * the two tci_read_r32() calls happen in encoding order (C argument
 * evaluation order is unspecified). */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode: consume one
 * register number byte, return the low 32 bits sign-extended. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode: consume one register
 * number byte, return the full register value. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
287 | ||
/* Read indexed register(s) with target address from bytecode.
 * When the guest address is wider than a host register (e.g. 64-bit
 * guest on 32-bit host), a second register supplies the high 32 bits. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
298 | ||
299 | /* Read indexed register or constant (native size) from bytecode. */ | |
5e75150c | 300 | static tcg_target_ulong |
305daaed | 301 | tci_read_ri(const tcg_target_ulong *regs, const uint8_t **tb_ptr) |
7657f4bf SW |
302 | { |
303 | tcg_target_ulong value; | |
771142c2 | 304 | TCGReg r = **tb_ptr; |
7657f4bf SW |
305 | *tb_ptr += 1; |
306 | if (r == TCG_CONST) { | |
307 | value = tci_read_i(tb_ptr); | |
308 | } else { | |
5e75150c | 309 | value = tci_read_reg(regs, r); |
7657f4bf SW |
310 | } |
311 | return value; | |
312 | } | |
313 | ||
314 | /* Read indexed register or constant (32 bit) from bytecode. */ | |
305daaed RH |
315 | static uint32_t tci_read_ri32(const tcg_target_ulong *regs, |
316 | const uint8_t **tb_ptr) | |
7657f4bf SW |
317 | { |
318 | uint32_t value; | |
771142c2 | 319 | TCGReg r = **tb_ptr; |
7657f4bf SW |
320 | *tb_ptr += 1; |
321 | if (r == TCG_CONST) { | |
322 | value = tci_read_i32(tb_ptr); | |
323 | } else { | |
5e75150c | 324 | value = tci_read_reg32(regs, r); |
7657f4bf SW |
325 | } |
326 | return value; | |
327 | } | |
328 | ||
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode.
 * The low half is encoded first; read it into a temporary so the two
 * tci_read_ri32() calls happen in encoding order. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode.
 * The first byte is a register number; the reserved value TCG_CONST
 * means a 64-bit immediate constant follows instead. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif
353 | ||
/* Read a branch-target address (encoded as a native-size constant) from
 * bytecode.  Zero would mean an unresolved label, hence the assertion. */
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
360 | ||
361 | static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition) | |
362 | { | |
363 | bool result = false; | |
364 | int32_t i0 = u0; | |
365 | int32_t i1 = u1; | |
366 | switch (condition) { | |
367 | case TCG_COND_EQ: | |
368 | result = (u0 == u1); | |
369 | break; | |
370 | case TCG_COND_NE: | |
371 | result = (u0 != u1); | |
372 | break; | |
373 | case TCG_COND_LT: | |
374 | result = (i0 < i1); | |
375 | break; | |
376 | case TCG_COND_GE: | |
377 | result = (i0 >= i1); | |
378 | break; | |
379 | case TCG_COND_LE: | |
380 | result = (i0 <= i1); | |
381 | break; | |
382 | case TCG_COND_GT: | |
383 | result = (i0 > i1); | |
384 | break; | |
385 | case TCG_COND_LTU: | |
386 | result = (u0 < u1); | |
387 | break; | |
388 | case TCG_COND_GEU: | |
389 | result = (u0 >= u1); | |
390 | break; | |
391 | case TCG_COND_LEU: | |
392 | result = (u0 <= u1); | |
393 | break; | |
394 | case TCG_COND_GTU: | |
395 | result = (u0 > u1); | |
396 | break; | |
397 | default: | |
398 | TODO(); | |
399 | } | |
400 | return result; | |
401 | } | |
402 | ||
403 | static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition) | |
404 | { | |
405 | bool result = false; | |
406 | int64_t i0 = u0; | |
407 | int64_t i1 = u1; | |
408 | switch (condition) { | |
409 | case TCG_COND_EQ: | |
410 | result = (u0 == u1); | |
411 | break; | |
412 | case TCG_COND_NE: | |
413 | result = (u0 != u1); | |
414 | break; | |
415 | case TCG_COND_LT: | |
416 | result = (i0 < i1); | |
417 | break; | |
418 | case TCG_COND_GE: | |
419 | result = (i0 >= i1); | |
420 | break; | |
421 | case TCG_COND_LE: | |
422 | result = (i0 <= i1); | |
423 | break; | |
424 | case TCG_COND_GT: | |
425 | result = (i0 > i1); | |
426 | break; | |
427 | case TCG_COND_LTU: | |
428 | result = (u0 < u1); | |
429 | break; | |
430 | case TCG_COND_GEU: | |
431 | result = (u0 >= u1); | |
432 | break; | |
433 | case TCG_COND_LEU: | |
434 | result = (u0 <= u1); | |
435 | break; | |
436 | case TCG_COND_GTU: | |
437 | result = (u0 > u1); | |
438 | break; | |
439 | default: | |
440 | TODO(); | |
441 | } | |
442 | return result; | |
443 | } | |
444 | ||
#ifdef CONFIG_SOFTMMU
/* Guest memory access for the interpreter loop.  With softmmu, go
 * through the MMU helpers; 'env', 'taddr', 'oi' and 'tb_ptr' must be in
 * scope at the point of expansion.  tb_ptr is passed as the return
 * address so the helpers can restart the faulting guest instruction. */
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
/* User mode: guest addresses map directly to host memory via g2h(). */
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
490 | ||
7657f4bf | 491 | /* Interpret pseudo code in tb. */ |
c905a368 DB |
492 | /* |
493 | * Disable CFI checks. | |
494 | * One possible operation in the pseudo code is a call to binary code. | |
495 | * Therefore, disable CFI checks in the interpreter function | |
496 | */ | |
db0c51a3 RH |
497 | uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env, |
498 | const void *v_tb_ptr) | |
7657f4bf | 499 | { |
305daaed | 500 | const uint8_t *tb_ptr = v_tb_ptr; |
5e75150c | 501 | tcg_target_ulong regs[TCG_TARGET_NB_REGS]; |
ee79c356 RH |
502 | long tcg_temps[CPU_TEMP_BUF_NLONGS]; |
503 | uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS); | |
819af24b | 504 | uintptr_t ret = 0; |
7657f4bf | 505 | |
5e75150c EC |
506 | regs[TCG_AREG0] = (tcg_target_ulong)env; |
507 | regs[TCG_REG_CALL_STACK] = sp_value; | |
3ccdbecf | 508 | tci_assert(tb_ptr); |
7657f4bf SW |
509 | |
510 | for (;;) { | |
7657f4bf | 511 | TCGOpcode opc = tb_ptr[0]; |
3ccdbecf | 512 | #if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG) |
7657f4bf | 513 | uint8_t op_size = tb_ptr[1]; |
305daaed | 514 | const uint8_t *old_code_ptr = tb_ptr; |
7657f4bf SW |
515 | #endif |
516 | tcg_target_ulong t0; | |
517 | tcg_target_ulong t1; | |
518 | tcg_target_ulong t2; | |
519 | tcg_target_ulong label; | |
520 | TCGCond condition; | |
521 | target_ulong taddr; | |
7657f4bf SW |
522 | uint8_t tmp8; |
523 | uint16_t tmp16; | |
524 | uint32_t tmp32; | |
525 | uint64_t tmp64; | |
526 | #if TCG_TARGET_REG_BITS == 32 | |
527 | uint64_t v64; | |
528 | #endif | |
59227d5d | 529 | TCGMemOpIdx oi; |
7657f4bf SW |
530 | |
531 | /* Skip opcode and size entry. */ | |
532 | tb_ptr += 2; | |
533 | ||
534 | switch (opc) { | |
7657f4bf | 535 | case INDEX_op_call: |
5e75150c | 536 | t0 = tci_read_ri(regs, &tb_ptr); |
13e71f08 | 537 | tci_tb_ptr = (uintptr_t)tb_ptr; |
7657f4bf | 538 | #if TCG_TARGET_REG_BITS == 32 |
5e75150c EC |
539 | tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0), |
540 | tci_read_reg(regs, TCG_REG_R1), | |
541 | tci_read_reg(regs, TCG_REG_R2), | |
542 | tci_read_reg(regs, TCG_REG_R3), | |
543 | tci_read_reg(regs, TCG_REG_R5), | |
544 | tci_read_reg(regs, TCG_REG_R6), | |
545 | tci_read_reg(regs, TCG_REG_R7), | |
546 | tci_read_reg(regs, TCG_REG_R8), | |
547 | tci_read_reg(regs, TCG_REG_R9), | |
1df3caa9 RH |
548 | tci_read_reg(regs, TCG_REG_R10), |
549 | tci_read_reg(regs, TCG_REG_R11), | |
550 | tci_read_reg(regs, TCG_REG_R12)); | |
5e75150c EC |
551 | tci_write_reg(regs, TCG_REG_R0, tmp64); |
552 | tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32); | |
7657f4bf | 553 | #else |
5e75150c EC |
554 | tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0), |
555 | tci_read_reg(regs, TCG_REG_R1), | |
556 | tci_read_reg(regs, TCG_REG_R2), | |
557 | tci_read_reg(regs, TCG_REG_R3), | |
1df3caa9 RH |
558 | tci_read_reg(regs, TCG_REG_R5), |
559 | tci_read_reg(regs, TCG_REG_R6)); | |
5e75150c | 560 | tci_write_reg(regs, TCG_REG_R0, tmp64); |
7657f4bf SW |
561 | #endif |
562 | break; | |
7657f4bf SW |
563 | case INDEX_op_br: |
564 | label = tci_read_label(&tb_ptr); | |
3ccdbecf | 565 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
566 | tb_ptr = (uint8_t *)label; |
567 | continue; | |
568 | case INDEX_op_setcond_i32: | |
569 | t0 = *tb_ptr++; | |
5e75150c EC |
570 | t1 = tci_read_r32(regs, &tb_ptr); |
571 | t2 = tci_read_ri32(regs, &tb_ptr); | |
7657f4bf | 572 | condition = *tb_ptr++; |
5e75150c | 573 | tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition)); |
7657f4bf SW |
574 | break; |
575 | #if TCG_TARGET_REG_BITS == 32 | |
576 | case INDEX_op_setcond2_i32: | |
577 | t0 = *tb_ptr++; | |
5e75150c EC |
578 | tmp64 = tci_read_r64(regs, &tb_ptr); |
579 | v64 = tci_read_ri64(regs, &tb_ptr); | |
7657f4bf | 580 | condition = *tb_ptr++; |
5e75150c | 581 | tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition)); |
7657f4bf SW |
582 | break; |
583 | #elif TCG_TARGET_REG_BITS == 64 | |
584 | case INDEX_op_setcond_i64: | |
585 | t0 = *tb_ptr++; | |
5e75150c EC |
586 | t1 = tci_read_r64(regs, &tb_ptr); |
587 | t2 = tci_read_ri64(regs, &tb_ptr); | |
7657f4bf | 588 | condition = *tb_ptr++; |
5e75150c | 589 | tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition)); |
7657f4bf SW |
590 | break; |
591 | #endif | |
592 | case INDEX_op_mov_i32: | |
593 | t0 = *tb_ptr++; | |
5e75150c EC |
594 | t1 = tci_read_r32(regs, &tb_ptr); |
595 | tci_write_reg32(regs, t0, t1); | |
7657f4bf | 596 | break; |
1bd1af98 | 597 | case INDEX_op_tci_movi_i32: |
7657f4bf SW |
598 | t0 = *tb_ptr++; |
599 | t1 = tci_read_i32(&tb_ptr); | |
5e75150c | 600 | tci_write_reg32(regs, t0, t1); |
7657f4bf SW |
601 | break; |
602 | ||
603 | /* Load/store operations (32 bit). */ | |
604 | ||
605 | case INDEX_op_ld8u_i32: | |
606 | t0 = *tb_ptr++; | |
5e75150c | 607 | t1 = tci_read_r(regs, &tb_ptr); |
03fc0548 | 608 | t2 = tci_read_s32(&tb_ptr); |
5e75150c | 609 | tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2)); |
7657f4bf SW |
610 | break; |
611 | case INDEX_op_ld8s_i32: | |
2f160e0f SW |
612 | TODO(); |
613 | break; | |
7657f4bf SW |
614 | case INDEX_op_ld16u_i32: |
615 | TODO(); | |
616 | break; | |
617 | case INDEX_op_ld16s_i32: | |
618 | TODO(); | |
619 | break; | |
620 | case INDEX_op_ld_i32: | |
621 | t0 = *tb_ptr++; | |
5e75150c | 622 | t1 = tci_read_r(regs, &tb_ptr); |
03fc0548 | 623 | t2 = tci_read_s32(&tb_ptr); |
5e75150c | 624 | tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2)); |
7657f4bf SW |
625 | break; |
626 | case INDEX_op_st8_i32: | |
5e75150c EC |
627 | t0 = tci_read_r8(regs, &tb_ptr); |
628 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 629 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
630 | *(uint8_t *)(t1 + t2) = t0; |
631 | break; | |
632 | case INDEX_op_st16_i32: | |
5e75150c EC |
633 | t0 = tci_read_r16(regs, &tb_ptr); |
634 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 635 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
636 | *(uint16_t *)(t1 + t2) = t0; |
637 | break; | |
638 | case INDEX_op_st_i32: | |
5e75150c EC |
639 | t0 = tci_read_r32(regs, &tb_ptr); |
640 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 641 | t2 = tci_read_s32(&tb_ptr); |
3ccdbecf | 642 | tci_assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
643 | *(uint32_t *)(t1 + t2) = t0; |
644 | break; | |
645 | ||
646 | /* Arithmetic operations (32 bit). */ | |
647 | ||
648 | case INDEX_op_add_i32: | |
649 | t0 = *tb_ptr++; | |
5e75150c EC |
650 | t1 = tci_read_ri32(regs, &tb_ptr); |
651 | t2 = tci_read_ri32(regs, &tb_ptr); | |
652 | tci_write_reg32(regs, t0, t1 + t2); | |
7657f4bf SW |
653 | break; |
654 | case INDEX_op_sub_i32: | |
655 | t0 = *tb_ptr++; | |
5e75150c EC |
656 | t1 = tci_read_ri32(regs, &tb_ptr); |
657 | t2 = tci_read_ri32(regs, &tb_ptr); | |
658 | tci_write_reg32(regs, t0, t1 - t2); | |
7657f4bf SW |
659 | break; |
660 | case INDEX_op_mul_i32: | |
661 | t0 = *tb_ptr++; | |
5e75150c EC |
662 | t1 = tci_read_ri32(regs, &tb_ptr); |
663 | t2 = tci_read_ri32(regs, &tb_ptr); | |
664 | tci_write_reg32(regs, t0, t1 * t2); | |
7657f4bf SW |
665 | break; |
666 | #if TCG_TARGET_HAS_div_i32 | |
667 | case INDEX_op_div_i32: | |
668 | t0 = *tb_ptr++; | |
5e75150c EC |
669 | t1 = tci_read_ri32(regs, &tb_ptr); |
670 | t2 = tci_read_ri32(regs, &tb_ptr); | |
671 | tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2); | |
7657f4bf SW |
672 | break; |
673 | case INDEX_op_divu_i32: | |
674 | t0 = *tb_ptr++; | |
5e75150c EC |
675 | t1 = tci_read_ri32(regs, &tb_ptr); |
676 | t2 = tci_read_ri32(regs, &tb_ptr); | |
677 | tci_write_reg32(regs, t0, t1 / t2); | |
7657f4bf SW |
678 | break; |
679 | case INDEX_op_rem_i32: | |
680 | t0 = *tb_ptr++; | |
5e75150c EC |
681 | t1 = tci_read_ri32(regs, &tb_ptr); |
682 | t2 = tci_read_ri32(regs, &tb_ptr); | |
683 | tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2); | |
7657f4bf SW |
684 | break; |
685 | case INDEX_op_remu_i32: | |
686 | t0 = *tb_ptr++; | |
5e75150c EC |
687 | t1 = tci_read_ri32(regs, &tb_ptr); |
688 | t2 = tci_read_ri32(regs, &tb_ptr); | |
689 | tci_write_reg32(regs, t0, t1 % t2); | |
7657f4bf SW |
690 | break; |
691 | #elif TCG_TARGET_HAS_div2_i32 | |
692 | case INDEX_op_div2_i32: | |
693 | case INDEX_op_divu2_i32: | |
694 | TODO(); | |
695 | break; | |
696 | #endif | |
697 | case INDEX_op_and_i32: | |
698 | t0 = *tb_ptr++; | |
5e75150c EC |
699 | t1 = tci_read_ri32(regs, &tb_ptr); |
700 | t2 = tci_read_ri32(regs, &tb_ptr); | |
701 | tci_write_reg32(regs, t0, t1 & t2); | |
7657f4bf SW |
702 | break; |
703 | case INDEX_op_or_i32: | |
704 | t0 = *tb_ptr++; | |
5e75150c EC |
705 | t1 = tci_read_ri32(regs, &tb_ptr); |
706 | t2 = tci_read_ri32(regs, &tb_ptr); | |
707 | tci_write_reg32(regs, t0, t1 | t2); | |
7657f4bf SW |
708 | break; |
709 | case INDEX_op_xor_i32: | |
710 | t0 = *tb_ptr++; | |
5e75150c EC |
711 | t1 = tci_read_ri32(regs, &tb_ptr); |
712 | t2 = tci_read_ri32(regs, &tb_ptr); | |
713 | tci_write_reg32(regs, t0, t1 ^ t2); | |
7657f4bf SW |
714 | break; |
715 | ||
716 | /* Shift/rotate operations (32 bit). */ | |
717 | ||
718 | case INDEX_op_shl_i32: | |
719 | t0 = *tb_ptr++; | |
5e75150c EC |
720 | t1 = tci_read_ri32(regs, &tb_ptr); |
721 | t2 = tci_read_ri32(regs, &tb_ptr); | |
722 | tci_write_reg32(regs, t0, t1 << (t2 & 31)); | |
7657f4bf SW |
723 | break; |
724 | case INDEX_op_shr_i32: | |
725 | t0 = *tb_ptr++; | |
5e75150c EC |
726 | t1 = tci_read_ri32(regs, &tb_ptr); |
727 | t2 = tci_read_ri32(regs, &tb_ptr); | |
728 | tci_write_reg32(regs, t0, t1 >> (t2 & 31)); | |
7657f4bf SW |
729 | break; |
730 | case INDEX_op_sar_i32: | |
731 | t0 = *tb_ptr++; | |
5e75150c EC |
732 | t1 = tci_read_ri32(regs, &tb_ptr); |
733 | t2 = tci_read_ri32(regs, &tb_ptr); | |
734 | tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31))); | |
7657f4bf SW |
735 | break; |
736 | #if TCG_TARGET_HAS_rot_i32 | |
737 | case INDEX_op_rotl_i32: | |
738 | t0 = *tb_ptr++; | |
5e75150c EC |
739 | t1 = tci_read_ri32(regs, &tb_ptr); |
740 | t2 = tci_read_ri32(regs, &tb_ptr); | |
741 | tci_write_reg32(regs, t0, rol32(t1, t2 & 31)); | |
7657f4bf SW |
742 | break; |
743 | case INDEX_op_rotr_i32: | |
744 | t0 = *tb_ptr++; | |
5e75150c EC |
745 | t1 = tci_read_ri32(regs, &tb_ptr); |
746 | t2 = tci_read_ri32(regs, &tb_ptr); | |
747 | tci_write_reg32(regs, t0, ror32(t1, t2 & 31)); | |
7657f4bf | 748 | break; |
e24dc9fe SW |
749 | #endif |
750 | #if TCG_TARGET_HAS_deposit_i32 | |
751 | case INDEX_op_deposit_i32: | |
752 | t0 = *tb_ptr++; | |
5e75150c EC |
753 | t1 = tci_read_r32(regs, &tb_ptr); |
754 | t2 = tci_read_r32(regs, &tb_ptr); | |
e24dc9fe SW |
755 | tmp16 = *tb_ptr++; |
756 | tmp8 = *tb_ptr++; | |
757 | tmp32 = (((1 << tmp8) - 1) << tmp16); | |
5e75150c | 758 | tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32)); |
e24dc9fe | 759 | break; |
7657f4bf SW |
760 | #endif |
761 | case INDEX_op_brcond_i32: | |
5e75150c EC |
762 | t0 = tci_read_r32(regs, &tb_ptr); |
763 | t1 = tci_read_ri32(regs, &tb_ptr); | |
7657f4bf SW |
764 | condition = *tb_ptr++; |
765 | label = tci_read_label(&tb_ptr); | |
766 | if (tci_compare32(t0, t1, condition)) { | |
3ccdbecf | 767 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
768 | tb_ptr = (uint8_t *)label; |
769 | continue; | |
770 | } | |
771 | break; | |
772 | #if TCG_TARGET_REG_BITS == 32 | |
773 | case INDEX_op_add2_i32: | |
774 | t0 = *tb_ptr++; | |
775 | t1 = *tb_ptr++; | |
5e75150c EC |
776 | tmp64 = tci_read_r64(regs, &tb_ptr); |
777 | tmp64 += tci_read_r64(regs, &tb_ptr); | |
778 | tci_write_reg64(regs, t1, t0, tmp64); | |
7657f4bf SW |
779 | break; |
780 | case INDEX_op_sub2_i32: | |
781 | t0 = *tb_ptr++; | |
782 | t1 = *tb_ptr++; | |
5e75150c EC |
783 | tmp64 = tci_read_r64(regs, &tb_ptr); |
784 | tmp64 -= tci_read_r64(regs, &tb_ptr); | |
785 | tci_write_reg64(regs, t1, t0, tmp64); | |
7657f4bf SW |
786 | break; |
787 | case INDEX_op_brcond2_i32: | |
5e75150c EC |
788 | tmp64 = tci_read_r64(regs, &tb_ptr); |
789 | v64 = tci_read_ri64(regs, &tb_ptr); | |
7657f4bf SW |
790 | condition = *tb_ptr++; |
791 | label = tci_read_label(&tb_ptr); | |
792 | if (tci_compare64(tmp64, v64, condition)) { | |
3ccdbecf | 793 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
794 | tb_ptr = (uint8_t *)label; |
795 | continue; | |
796 | } | |
797 | break; | |
798 | case INDEX_op_mulu2_i32: | |
799 | t0 = *tb_ptr++; | |
800 | t1 = *tb_ptr++; | |
5e75150c EC |
801 | t2 = tci_read_r32(regs, &tb_ptr); |
802 | tmp64 = tci_read_r32(regs, &tb_ptr); | |
803 | tci_write_reg64(regs, t1, t0, t2 * tmp64); | |
7657f4bf SW |
804 | break; |
805 | #endif /* TCG_TARGET_REG_BITS == 32 */ | |
806 | #if TCG_TARGET_HAS_ext8s_i32 | |
807 | case INDEX_op_ext8s_i32: | |
808 | t0 = *tb_ptr++; | |
5e75150c EC |
809 | t1 = tci_read_r8s(regs, &tb_ptr); |
810 | tci_write_reg32(regs, t0, t1); | |
7657f4bf SW |
811 | break; |
812 | #endif | |
813 | #if TCG_TARGET_HAS_ext16s_i32 | |
814 | case INDEX_op_ext16s_i32: | |
815 | t0 = *tb_ptr++; | |
5e75150c EC |
816 | t1 = tci_read_r16s(regs, &tb_ptr); |
817 | tci_write_reg32(regs, t0, t1); | |
7657f4bf SW |
818 | break; |
819 | #endif | |
820 | #if TCG_TARGET_HAS_ext8u_i32 | |
821 | case INDEX_op_ext8u_i32: | |
822 | t0 = *tb_ptr++; | |
5e75150c EC |
823 | t1 = tci_read_r8(regs, &tb_ptr); |
824 | tci_write_reg32(regs, t0, t1); | |
7657f4bf SW |
825 | break; |
826 | #endif | |
827 | #if TCG_TARGET_HAS_ext16u_i32 | |
828 | case INDEX_op_ext16u_i32: | |
829 | t0 = *tb_ptr++; | |
5e75150c EC |
830 | t1 = tci_read_r16(regs, &tb_ptr); |
831 | tci_write_reg32(regs, t0, t1); | |
7657f4bf SW |
832 | break; |
833 | #endif | |
834 | #if TCG_TARGET_HAS_bswap16_i32 | |
835 | case INDEX_op_bswap16_i32: | |
836 | t0 = *tb_ptr++; | |
5e75150c EC |
837 | t1 = tci_read_r16(regs, &tb_ptr); |
838 | tci_write_reg32(regs, t0, bswap16(t1)); | |
7657f4bf SW |
839 | break; |
840 | #endif | |
841 | #if TCG_TARGET_HAS_bswap32_i32 | |
842 | case INDEX_op_bswap32_i32: | |
843 | t0 = *tb_ptr++; | |
5e75150c EC |
844 | t1 = tci_read_r32(regs, &tb_ptr); |
845 | tci_write_reg32(regs, t0, bswap32(t1)); | |
7657f4bf SW |
846 | break; |
847 | #endif | |
848 | #if TCG_TARGET_HAS_not_i32 | |
849 | case INDEX_op_not_i32: | |
850 | t0 = *tb_ptr++; | |
5e75150c EC |
851 | t1 = tci_read_r32(regs, &tb_ptr); |
852 | tci_write_reg32(regs, t0, ~t1); | |
7657f4bf SW |
853 | break; |
854 | #endif | |
855 | #if TCG_TARGET_HAS_neg_i32 | |
856 | case INDEX_op_neg_i32: | |
857 | t0 = *tb_ptr++; | |
5e75150c EC |
858 | t1 = tci_read_r32(regs, &tb_ptr); |
859 | tci_write_reg32(regs, t0, -t1); | |
7657f4bf SW |
860 | break; |
861 | #endif | |
862 | #if TCG_TARGET_REG_BITS == 64 | |
863 | case INDEX_op_mov_i64: | |
864 | t0 = *tb_ptr++; | |
5e75150c EC |
865 | t1 = tci_read_r64(regs, &tb_ptr); |
866 | tci_write_reg64(regs, t0, t1); | |
7657f4bf | 867 | break; |
1bd1af98 | 868 | case INDEX_op_tci_movi_i64: |
7657f4bf SW |
869 | t0 = *tb_ptr++; |
870 | t1 = tci_read_i64(&tb_ptr); | |
5e75150c | 871 | tci_write_reg64(regs, t0, t1); |
7657f4bf SW |
872 | break; |
873 | ||
874 | /* Load/store operations (64 bit). */ | |
875 | ||
876 | case INDEX_op_ld8u_i64: | |
877 | t0 = *tb_ptr++; | |
5e75150c | 878 | t1 = tci_read_r(regs, &tb_ptr); |
03fc0548 | 879 | t2 = tci_read_s32(&tb_ptr); |
5e75150c | 880 | tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2)); |
7657f4bf SW |
881 | break; |
882 | case INDEX_op_ld8s_i64: | |
2f160e0f SW |
883 | TODO(); |
884 | break; | |
7657f4bf | 885 | case INDEX_op_ld16u_i64: |
2f160e0f SW |
886 | t0 = *tb_ptr++; |
887 | t1 = tci_read_r(regs, &tb_ptr); | |
888 | t2 = tci_read_s32(&tb_ptr); | |
889 | tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2)); | |
890 | break; | |
7657f4bf SW |
891 | case INDEX_op_ld16s_i64: |
892 | TODO(); | |
893 | break; | |
894 | case INDEX_op_ld32u_i64: | |
895 | t0 = *tb_ptr++; | |
5e75150c | 896 | t1 = tci_read_r(regs, &tb_ptr); |
03fc0548 | 897 | t2 = tci_read_s32(&tb_ptr); |
5e75150c | 898 | tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2)); |
7657f4bf SW |
899 | break; |
900 | case INDEX_op_ld32s_i64: | |
901 | t0 = *tb_ptr++; | |
5e75150c | 902 | t1 = tci_read_r(regs, &tb_ptr); |
03fc0548 | 903 | t2 = tci_read_s32(&tb_ptr); |
5e75150c | 904 | tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2)); |
7657f4bf SW |
905 | break; |
906 | case INDEX_op_ld_i64: | |
907 | t0 = *tb_ptr++; | |
5e75150c | 908 | t1 = tci_read_r(regs, &tb_ptr); |
03fc0548 | 909 | t2 = tci_read_s32(&tb_ptr); |
5e75150c | 910 | tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2)); |
7657f4bf SW |
911 | break; |
912 | case INDEX_op_st8_i64: | |
5e75150c EC |
913 | t0 = tci_read_r8(regs, &tb_ptr); |
914 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 915 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
916 | *(uint8_t *)(t1 + t2) = t0; |
917 | break; | |
918 | case INDEX_op_st16_i64: | |
5e75150c EC |
919 | t0 = tci_read_r16(regs, &tb_ptr); |
920 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 921 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
922 | *(uint16_t *)(t1 + t2) = t0; |
923 | break; | |
924 | case INDEX_op_st32_i64: | |
5e75150c EC |
925 | t0 = tci_read_r32(regs, &tb_ptr); |
926 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 927 | t2 = tci_read_s32(&tb_ptr); |
7657f4bf SW |
928 | *(uint32_t *)(t1 + t2) = t0; |
929 | break; | |
930 | case INDEX_op_st_i64: | |
5e75150c EC |
931 | t0 = tci_read_r64(regs, &tb_ptr); |
932 | t1 = tci_read_r(regs, &tb_ptr); | |
03fc0548 | 933 | t2 = tci_read_s32(&tb_ptr); |
3ccdbecf | 934 | tci_assert(t1 != sp_value || (int32_t)t2 < 0); |
7657f4bf SW |
935 | *(uint64_t *)(t1 + t2) = t0; |
936 | break; | |
937 | ||
938 | /* Arithmetic operations (64 bit). */ | |
939 | ||
940 | case INDEX_op_add_i64: | |
941 | t0 = *tb_ptr++; | |
5e75150c EC |
942 | t1 = tci_read_ri64(regs, &tb_ptr); |
943 | t2 = tci_read_ri64(regs, &tb_ptr); | |
944 | tci_write_reg64(regs, t0, t1 + t2); | |
7657f4bf SW |
945 | break; |
946 | case INDEX_op_sub_i64: | |
947 | t0 = *tb_ptr++; | |
5e75150c EC |
948 | t1 = tci_read_ri64(regs, &tb_ptr); |
949 | t2 = tci_read_ri64(regs, &tb_ptr); | |
950 | tci_write_reg64(regs, t0, t1 - t2); | |
7657f4bf SW |
951 | break; |
952 | case INDEX_op_mul_i64: | |
953 | t0 = *tb_ptr++; | |
5e75150c EC |
954 | t1 = tci_read_ri64(regs, &tb_ptr); |
955 | t2 = tci_read_ri64(regs, &tb_ptr); | |
956 | tci_write_reg64(regs, t0, t1 * t2); | |
7657f4bf SW |
957 | break; |
958 | #if TCG_TARGET_HAS_div_i64 | |
959 | case INDEX_op_div_i64: | |
960 | case INDEX_op_divu_i64: | |
961 | case INDEX_op_rem_i64: | |
962 | case INDEX_op_remu_i64: | |
963 | TODO(); | |
964 | break; | |
965 | #elif TCG_TARGET_HAS_div2_i64 | |
966 | case INDEX_op_div2_i64: | |
967 | case INDEX_op_divu2_i64: | |
968 | TODO(); | |
969 | break; | |
970 | #endif | |
971 | case INDEX_op_and_i64: | |
972 | t0 = *tb_ptr++; | |
5e75150c EC |
973 | t1 = tci_read_ri64(regs, &tb_ptr); |
974 | t2 = tci_read_ri64(regs, &tb_ptr); | |
975 | tci_write_reg64(regs, t0, t1 & t2); | |
7657f4bf SW |
976 | break; |
977 | case INDEX_op_or_i64: | |
978 | t0 = *tb_ptr++; | |
5e75150c EC |
979 | t1 = tci_read_ri64(regs, &tb_ptr); |
980 | t2 = tci_read_ri64(regs, &tb_ptr); | |
981 | tci_write_reg64(regs, t0, t1 | t2); | |
7657f4bf SW |
982 | break; |
983 | case INDEX_op_xor_i64: | |
984 | t0 = *tb_ptr++; | |
5e75150c EC |
985 | t1 = tci_read_ri64(regs, &tb_ptr); |
986 | t2 = tci_read_ri64(regs, &tb_ptr); | |
987 | tci_write_reg64(regs, t0, t1 ^ t2); | |
7657f4bf SW |
988 | break; |
989 | ||
990 | /* Shift/rotate operations (64 bit). */ | |
991 | ||
992 | case INDEX_op_shl_i64: | |
993 | t0 = *tb_ptr++; | |
5e75150c EC |
994 | t1 = tci_read_ri64(regs, &tb_ptr); |
995 | t2 = tci_read_ri64(regs, &tb_ptr); | |
996 | tci_write_reg64(regs, t0, t1 << (t2 & 63)); | |
7657f4bf SW |
997 | break; |
998 | case INDEX_op_shr_i64: | |
999 | t0 = *tb_ptr++; | |
5e75150c EC |
1000 | t1 = tci_read_ri64(regs, &tb_ptr); |
1001 | t2 = tci_read_ri64(regs, &tb_ptr); | |
1002 | tci_write_reg64(regs, t0, t1 >> (t2 & 63)); | |
7657f4bf SW |
1003 | break; |
1004 | case INDEX_op_sar_i64: | |
1005 | t0 = *tb_ptr++; | |
5e75150c EC |
1006 | t1 = tci_read_ri64(regs, &tb_ptr); |
1007 | t2 = tci_read_ri64(regs, &tb_ptr); | |
1008 | tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63))); | |
7657f4bf SW |
1009 | break; |
1010 | #if TCG_TARGET_HAS_rot_i64 | |
1011 | case INDEX_op_rotl_i64: | |
d285bf78 | 1012 | t0 = *tb_ptr++; |
5e75150c EC |
1013 | t1 = tci_read_ri64(regs, &tb_ptr); |
1014 | t2 = tci_read_ri64(regs, &tb_ptr); | |
1015 | tci_write_reg64(regs, t0, rol64(t1, t2 & 63)); | |
d285bf78 | 1016 | break; |
7657f4bf | 1017 | case INDEX_op_rotr_i64: |
d285bf78 | 1018 | t0 = *tb_ptr++; |
5e75150c EC |
1019 | t1 = tci_read_ri64(regs, &tb_ptr); |
1020 | t2 = tci_read_ri64(regs, &tb_ptr); | |
1021 | tci_write_reg64(regs, t0, ror64(t1, t2 & 63)); | |
7657f4bf | 1022 | break; |
e24dc9fe SW |
1023 | #endif |
1024 | #if TCG_TARGET_HAS_deposit_i64 | |
1025 | case INDEX_op_deposit_i64: | |
1026 | t0 = *tb_ptr++; | |
5e75150c EC |
1027 | t1 = tci_read_r64(regs, &tb_ptr); |
1028 | t2 = tci_read_r64(regs, &tb_ptr); | |
e24dc9fe SW |
1029 | tmp16 = *tb_ptr++; |
1030 | tmp8 = *tb_ptr++; | |
1031 | tmp64 = (((1ULL << tmp8) - 1) << tmp16); | |
5e75150c | 1032 | tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64)); |
e24dc9fe | 1033 | break; |
7657f4bf SW |
1034 | #endif |
1035 | case INDEX_op_brcond_i64: | |
5e75150c EC |
1036 | t0 = tci_read_r64(regs, &tb_ptr); |
1037 | t1 = tci_read_ri64(regs, &tb_ptr); | |
7657f4bf SW |
1038 | condition = *tb_ptr++; |
1039 | label = tci_read_label(&tb_ptr); | |
1040 | if (tci_compare64(t0, t1, condition)) { | |
3ccdbecf | 1041 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
1042 | tb_ptr = (uint8_t *)label; |
1043 | continue; | |
1044 | } | |
1045 | break; | |
1046 | #if TCG_TARGET_HAS_ext8u_i64 | |
1047 | case INDEX_op_ext8u_i64: | |
1048 | t0 = *tb_ptr++; | |
5e75150c EC |
1049 | t1 = tci_read_r8(regs, &tb_ptr); |
1050 | tci_write_reg64(regs, t0, t1); | |
7657f4bf SW |
1051 | break; |
1052 | #endif | |
1053 | #if TCG_TARGET_HAS_ext8s_i64 | |
1054 | case INDEX_op_ext8s_i64: | |
1055 | t0 = *tb_ptr++; | |
5e75150c EC |
1056 | t1 = tci_read_r8s(regs, &tb_ptr); |
1057 | tci_write_reg64(regs, t0, t1); | |
7657f4bf SW |
1058 | break; |
1059 | #endif | |
1060 | #if TCG_TARGET_HAS_ext16s_i64 | |
1061 | case INDEX_op_ext16s_i64: | |
1062 | t0 = *tb_ptr++; | |
5e75150c EC |
1063 | t1 = tci_read_r16s(regs, &tb_ptr); |
1064 | tci_write_reg64(regs, t0, t1); | |
7657f4bf SW |
1065 | break; |
1066 | #endif | |
1067 | #if TCG_TARGET_HAS_ext16u_i64 | |
1068 | case INDEX_op_ext16u_i64: | |
1069 | t0 = *tb_ptr++; | |
5e75150c EC |
1070 | t1 = tci_read_r16(regs, &tb_ptr); |
1071 | tci_write_reg64(regs, t0, t1); | |
7657f4bf SW |
1072 | break; |
1073 | #endif | |
1074 | #if TCG_TARGET_HAS_ext32s_i64 | |
1075 | case INDEX_op_ext32s_i64: | |
4f2331e5 AJ |
1076 | #endif |
1077 | case INDEX_op_ext_i32_i64: | |
7657f4bf | 1078 | t0 = *tb_ptr++; |
5e75150c EC |
1079 | t1 = tci_read_r32s(regs, &tb_ptr); |
1080 | tci_write_reg64(regs, t0, t1); | |
7657f4bf | 1081 | break; |
7657f4bf SW |
1082 | #if TCG_TARGET_HAS_ext32u_i64 |
1083 | case INDEX_op_ext32u_i64: | |
4f2331e5 AJ |
1084 | #endif |
1085 | case INDEX_op_extu_i32_i64: | |
7657f4bf | 1086 | t0 = *tb_ptr++; |
5e75150c EC |
1087 | t1 = tci_read_r32(regs, &tb_ptr); |
1088 | tci_write_reg64(regs, t0, t1); | |
7657f4bf | 1089 | break; |
7657f4bf SW |
1090 | #if TCG_TARGET_HAS_bswap16_i64 |
1091 | case INDEX_op_bswap16_i64: | |
7657f4bf | 1092 | t0 = *tb_ptr++; |
5e75150c EC |
1093 | t1 = tci_read_r16(regs, &tb_ptr); |
1094 | tci_write_reg64(regs, t0, bswap16(t1)); | |
7657f4bf SW |
1095 | break; |
1096 | #endif | |
1097 | #if TCG_TARGET_HAS_bswap32_i64 | |
1098 | case INDEX_op_bswap32_i64: | |
1099 | t0 = *tb_ptr++; | |
5e75150c EC |
1100 | t1 = tci_read_r32(regs, &tb_ptr); |
1101 | tci_write_reg64(regs, t0, bswap32(t1)); | |
7657f4bf SW |
1102 | break; |
1103 | #endif | |
1104 | #if TCG_TARGET_HAS_bswap64_i64 | |
1105 | case INDEX_op_bswap64_i64: | |
7657f4bf | 1106 | t0 = *tb_ptr++; |
5e75150c EC |
1107 | t1 = tci_read_r64(regs, &tb_ptr); |
1108 | tci_write_reg64(regs, t0, bswap64(t1)); | |
7657f4bf SW |
1109 | break; |
1110 | #endif | |
1111 | #if TCG_TARGET_HAS_not_i64 | |
1112 | case INDEX_op_not_i64: | |
1113 | t0 = *tb_ptr++; | |
5e75150c EC |
1114 | t1 = tci_read_r64(regs, &tb_ptr); |
1115 | tci_write_reg64(regs, t0, ~t1); | |
7657f4bf SW |
1116 | break; |
1117 | #endif | |
1118 | #if TCG_TARGET_HAS_neg_i64 | |
1119 | case INDEX_op_neg_i64: | |
1120 | t0 = *tb_ptr++; | |
5e75150c EC |
1121 | t1 = tci_read_r64(regs, &tb_ptr); |
1122 | tci_write_reg64(regs, t0, -t1); | |
7657f4bf SW |
1123 | break; |
1124 | #endif | |
1125 | #endif /* TCG_TARGET_REG_BITS == 64 */ | |
1126 | ||
1127 | /* QEMU specific operations. */ | |
1128 | ||
7657f4bf | 1129 | case INDEX_op_exit_tb: |
819af24b | 1130 | ret = *(uint64_t *)tb_ptr; |
7657f4bf SW |
1131 | goto exit; |
1132 | break; | |
1133 | case INDEX_op_goto_tb: | |
76442a93 SF |
1134 | /* Jump address is aligned */ |
1135 | tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4); | |
d73415a3 | 1136 | t0 = qatomic_read((int32_t *)tb_ptr); |
76442a93 | 1137 | tb_ptr += sizeof(int32_t); |
3ccdbecf | 1138 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
1139 | tb_ptr += (int32_t)t0; |
1140 | continue; | |
76782fab | 1141 | case INDEX_op_qemu_ld_i32: |
7657f4bf | 1142 | t0 = *tb_ptr++; |
5e75150c | 1143 | taddr = tci_read_ulong(regs, &tb_ptr); |
59227d5d | 1144 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1145 | switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) { |
76782fab RH |
1146 | case MO_UB: |
1147 | tmp32 = qemu_ld_ub; | |
1148 | break; | |
1149 | case MO_SB: | |
1150 | tmp32 = (int8_t)qemu_ld_ub; | |
1151 | break; | |
1152 | case MO_LEUW: | |
1153 | tmp32 = qemu_ld_leuw; | |
1154 | break; | |
1155 | case MO_LESW: | |
1156 | tmp32 = (int16_t)qemu_ld_leuw; | |
1157 | break; | |
1158 | case MO_LEUL: | |
1159 | tmp32 = qemu_ld_leul; | |
1160 | break; | |
1161 | case MO_BEUW: | |
1162 | tmp32 = qemu_ld_beuw; | |
1163 | break; | |
1164 | case MO_BESW: | |
1165 | tmp32 = (int16_t)qemu_ld_beuw; | |
1166 | break; | |
1167 | case MO_BEUL: | |
1168 | tmp32 = qemu_ld_beul; | |
1169 | break; | |
1170 | default: | |
1171 | tcg_abort(); | |
1172 | } | |
5e75150c | 1173 | tci_write_reg(regs, t0, tmp32); |
7657f4bf | 1174 | break; |
76782fab | 1175 | case INDEX_op_qemu_ld_i64: |
7657f4bf | 1176 | t0 = *tb_ptr++; |
76782fab RH |
1177 | if (TCG_TARGET_REG_BITS == 32) { |
1178 | t1 = *tb_ptr++; | |
1179 | } | |
5e75150c | 1180 | taddr = tci_read_ulong(regs, &tb_ptr); |
59227d5d | 1181 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1182 | switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) { |
76782fab RH |
1183 | case MO_UB: |
1184 | tmp64 = qemu_ld_ub; | |
1185 | break; | |
1186 | case MO_SB: | |
1187 | tmp64 = (int8_t)qemu_ld_ub; | |
1188 | break; | |
1189 | case MO_LEUW: | |
1190 | tmp64 = qemu_ld_leuw; | |
1191 | break; | |
1192 | case MO_LESW: | |
1193 | tmp64 = (int16_t)qemu_ld_leuw; | |
1194 | break; | |
1195 | case MO_LEUL: | |
1196 | tmp64 = qemu_ld_leul; | |
1197 | break; | |
1198 | case MO_LESL: | |
1199 | tmp64 = (int32_t)qemu_ld_leul; | |
1200 | break; | |
1201 | case MO_LEQ: | |
1202 | tmp64 = qemu_ld_leq; | |
1203 | break; | |
1204 | case MO_BEUW: | |
1205 | tmp64 = qemu_ld_beuw; | |
1206 | break; | |
1207 | case MO_BESW: | |
1208 | tmp64 = (int16_t)qemu_ld_beuw; | |
1209 | break; | |
1210 | case MO_BEUL: | |
1211 | tmp64 = qemu_ld_beul; | |
1212 | break; | |
1213 | case MO_BESL: | |
1214 | tmp64 = (int32_t)qemu_ld_beul; | |
1215 | break; | |
1216 | case MO_BEQ: | |
1217 | tmp64 = qemu_ld_beq; | |
1218 | break; | |
1219 | default: | |
1220 | tcg_abort(); | |
1221 | } | |
5e75150c | 1222 | tci_write_reg(regs, t0, tmp64); |
76782fab | 1223 | if (TCG_TARGET_REG_BITS == 32) { |
5e75150c | 1224 | tci_write_reg(regs, t1, tmp64 >> 32); |
76782fab | 1225 | } |
7657f4bf | 1226 | break; |
76782fab | 1227 | case INDEX_op_qemu_st_i32: |
5e75150c EC |
1228 | t0 = tci_read_r(regs, &tb_ptr); |
1229 | taddr = tci_read_ulong(regs, &tb_ptr); | |
59227d5d | 1230 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1231 | switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) { |
76782fab RH |
1232 | case MO_UB: |
1233 | qemu_st_b(t0); | |
1234 | break; | |
1235 | case MO_LEUW: | |
1236 | qemu_st_lew(t0); | |
1237 | break; | |
1238 | case MO_LEUL: | |
1239 | qemu_st_lel(t0); | |
1240 | break; | |
1241 | case MO_BEUW: | |
1242 | qemu_st_bew(t0); | |
1243 | break; | |
1244 | case MO_BEUL: | |
1245 | qemu_st_bel(t0); | |
1246 | break; | |
1247 | default: | |
1248 | tcg_abort(); | |
1249 | } | |
7657f4bf | 1250 | break; |
76782fab | 1251 | case INDEX_op_qemu_st_i64: |
5e75150c EC |
1252 | tmp64 = tci_read_r64(regs, &tb_ptr); |
1253 | taddr = tci_read_ulong(regs, &tb_ptr); | |
59227d5d | 1254 | oi = tci_read_i(&tb_ptr); |
2b7ec66f | 1255 | switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) { |
76782fab RH |
1256 | case MO_UB: |
1257 | qemu_st_b(tmp64); | |
1258 | break; | |
1259 | case MO_LEUW: | |
1260 | qemu_st_lew(tmp64); | |
1261 | break; | |
1262 | case MO_LEUL: | |
1263 | qemu_st_lel(tmp64); | |
1264 | break; | |
1265 | case MO_LEQ: | |
1266 | qemu_st_leq(tmp64); | |
1267 | break; | |
1268 | case MO_BEUW: | |
1269 | qemu_st_bew(tmp64); | |
1270 | break; | |
1271 | case MO_BEUL: | |
1272 | qemu_st_bel(tmp64); | |
1273 | break; | |
1274 | case MO_BEQ: | |
1275 | qemu_st_beq(tmp64); | |
1276 | break; | |
1277 | default: | |
1278 | tcg_abort(); | |
1279 | } | |
7657f4bf | 1280 | break; |
a1e69e2f PK |
1281 | case INDEX_op_mb: |
1282 | /* Ensure ordering for all kinds */ | |
1283 | smp_mb(); | |
1284 | break; | |
7657f4bf SW |
1285 | default: |
1286 | TODO(); | |
1287 | break; | |
1288 | } | |
3ccdbecf | 1289 | tci_assert(tb_ptr == old_code_ptr + op_size); |
7657f4bf SW |
1290 | } |
1291 | exit: | |
819af24b | 1292 | return ret; |
7657f4bf | 1293 | } |