/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu-common.h"

#include "tcg-target.h"

/* Default target word size to pointer size. */
#ifndef TCG_TARGET_REG_BITS
# if UINTPTR_MAX == UINT32_MAX
#  define TCG_TARGET_REG_BITS 32
# elif UINTPTR_MAX == UINT64_MAX
#  define TCG_TARGET_REG_BITS 64
# else
#  error Unknown pointer size for tcg target
# endif
#endif

#if TCG_TARGET_REG_BITS == 32
typedef int32_t tcg_target_long;
typedef uint32_t tcg_target_ulong;
#define TCG_PRIlx PRIx32
#define TCG_PRIld PRId32
#elif TCG_TARGET_REG_BITS == 64
typedef int64_t tcg_target_long;
typedef uint64_t tcg_target_ulong;
#define TCG_PRIlx PRIx64
#define TCG_PRIld PRId64
#else
#error unsupported
#endif
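
/* Illustrative sketch (not part of the original header): TCG_PRIlx and
 * TCG_PRIld are used like the <inttypes.h> PRI* macros, so host-word-sized
 * values print portably regardless of TCG_TARGET_REG_BITS:
 *
 *     tcg_target_ulong addr = 0x1000;                  // hypothetical value
 *     printf("addr = 0x%" TCG_PRIlx "\n", addr);
 *     printf("as signed = %" TCG_PRIld "\n", (tcg_target_long)addr);
 */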

#include "tcg-runtime.h"

#if TCG_TARGET_NB_REGS <= 32
typedef uint32_t TCGRegSet;
#elif TCG_TARGET_NB_REGS <= 64
typedef uint64_t TCGRegSet;
#else
#error unsupported
#endif

#if TCG_TARGET_REG_BITS == 32
/* Turn some undef macros into false macros. */
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_rem_i64          0
#define TCG_TARGET_HAS_div2_i64         0
#define TCG_TARGET_HAS_rot_i64          0
#define TCG_TARGET_HAS_ext8s_i64        0
#define TCG_TARGET_HAS_ext16s_i64       0
#define TCG_TARGET_HAS_ext32s_i64       0
#define TCG_TARGET_HAS_ext8u_i64        0
#define TCG_TARGET_HAS_ext16u_i64       0
#define TCG_TARGET_HAS_ext32u_i64       0
#define TCG_TARGET_HAS_bswap16_i64      0
#define TCG_TARGET_HAS_bswap32_i64      0
#define TCG_TARGET_HAS_bswap64_i64      0
#define TCG_TARGET_HAS_neg_i64          0
#define TCG_TARGET_HAS_not_i64          0
#define TCG_TARGET_HAS_andc_i64         0
#define TCG_TARGET_HAS_orc_i64          0
#define TCG_TARGET_HAS_eqv_i64          0
#define TCG_TARGET_HAS_nand_i64         0
#define TCG_TARGET_HAS_nor_i64          0
#define TCG_TARGET_HAS_deposit_i64      0
#define TCG_TARGET_HAS_movcond_i64      0
#define TCG_TARGET_HAS_add2_i64         0
#define TCG_TARGET_HAS_sub2_i64         0
#define TCG_TARGET_HAS_mulu2_i64        0
#define TCG_TARGET_HAS_muls2_i64        0
#define TCG_TARGET_HAS_muluh_i64        0
#define TCG_TARGET_HAS_mulsh_i64        0
/* Turn some undef macros into true macros. */
#define TCG_TARGET_HAS_add2_i32         1
#define TCG_TARGET_HAS_sub2_i32         1
#define TCG_TARGET_HAS_mulu2_i32        1
#endif

#ifndef TCG_TARGET_deposit_i32_valid
#define TCG_TARGET_deposit_i32_valid(ofs, len) 1
#endif
#ifndef TCG_TARGET_deposit_i64_valid
#define TCG_TARGET_deposit_i64_valid(ofs, len) 1
#endif

/* Only one of DIV or DIV2 should be defined. */
#if defined(TCG_TARGET_HAS_div_i32)
#define TCG_TARGET_HAS_div2_i32         0
#elif defined(TCG_TARGET_HAS_div2_i32)
#define TCG_TARGET_HAS_div_i32          0
#define TCG_TARGET_HAS_rem_i32          0
#endif
#if defined(TCG_TARGET_HAS_div_i64)
#define TCG_TARGET_HAS_div2_i64         0
#elif defined(TCG_TARGET_HAS_div2_i64)
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_rem_i64          0
#endif

typedef enum TCGOpcode {
#define DEF(name, oargs, iargs, cargs, flags) INDEX_op_ ## name,
#include "tcg-opc.h"
#undef DEF
    NB_OPS,
} TCGOpcode;

#define tcg_regset_clear(d) (d) = 0
#define tcg_regset_set(d, s) (d) = (s)
#define tcg_regset_set32(d, reg, val32) (d) |= (val32) << (reg)
#define tcg_regset_set_reg(d, r) (d) |= 1L << (r)
#define tcg_regset_reset_reg(d, r) (d) &= ~(1L << (r))
#define tcg_regset_test_reg(d, r) (((d) >> (r)) & 1)
#define tcg_regset_or(d, a, b) (d) = (a) | (b)
#define tcg_regset_and(d, a, b) (d) = (a) & (b)
#define tcg_regset_andnot(d, a, b) (d) = (a) & ~(b)
#define tcg_regset_not(d, a) (d) = ~(a)
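
/* Illustrative sketch (not part of the original header): a TCGRegSet is a bit
 * mask indexed by register number, manipulated with the macros above.
 * Building a set of two hypothetical registers REG_A and REG_B and testing
 * membership might look like:
 *
 *     TCGRegSet set;
 *     tcg_regset_clear(set);
 *     tcg_regset_set_reg(set, REG_A);        // set |= 1L << REG_A
 *     tcg_regset_set_reg(set, REG_B);
 *     if (tcg_regset_test_reg(set, REG_A)) {
 *         tcg_regset_reset_reg(set, REG_A);  // drop REG_A again
 *     }
 */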

typedef struct TCGRelocation {
    struct TCGRelocation *next;
    int type;
    uint8_t *ptr;
    tcg_target_long addend;
} TCGRelocation;

typedef struct TCGLabel {
    int has_value;
    union {
        tcg_target_ulong value;
        TCGRelocation *first_reloc;
    } u;
} TCGLabel;

typedef struct TCGPool {
    struct TCGPool *next;
    int size;
    uint8_t data[0] __attribute__ ((aligned));
} TCGPool;

#define TCG_POOL_CHUNK_SIZE 32768

#define TCG_MAX_LABELS 512

#define TCG_MAX_TEMPS 512

/* when the size of the arguments of a called function is smaller than
   this value, they are statically allocated in the TB stack frame */
#define TCG_STATIC_CALL_ARGS_SIZE 128

typedef enum TCGType {
    TCG_TYPE_I32,
    TCG_TYPE_I64,
    TCG_TYPE_COUNT, /* number of different types */

    /* An alias for the size of the host register. */
#if TCG_TARGET_REG_BITS == 32
    TCG_TYPE_REG = TCG_TYPE_I32,
#else
    TCG_TYPE_REG = TCG_TYPE_I64,
#endif

    /* An alias for the size of the native pointer.  We don't currently
       support any hosts with 64-bit registers and 32-bit pointers. */
    TCG_TYPE_PTR = TCG_TYPE_REG,

    /* An alias for the size of the target "long", aka register. */
#if TARGET_LONG_BITS == 64
    TCG_TYPE_TL = TCG_TYPE_I64,
#else
    TCG_TYPE_TL = TCG_TYPE_I32,
#endif
} TCGType;

typedef tcg_target_ulong TCGArg;

/* Define a type and accessor macros for variables.  Using a struct is
   nice because it gives some level of type safety.  Ideally the compiler
   would be able to see through all this.  However in practice this is not
   true, especially on targets with braindamaged ABIs (e.g. i386).
   We use plain int by default to avoid this runtime overhead.
   Users of tcg_gen_* don't need to know about any of this, and should
   treat TCGv as an opaque type.
   In addition we do typechecking for different types of variables.  TCGv_i32
   and TCGv_i64 are 32/64-bit variables respectively.  TCGv and TCGv_ptr
   are aliases for target_ulong and host pointer sized values respectively.
 */

#if defined(CONFIG_QEMU_LDST_OPTIMIZATION) && defined(CONFIG_SOFTMMU)
/* Macros/structures for qemu_ld/st IR code optimization:
   TCG_MAX_HELPER_LABELS is defined to be the same as OPC_BUF_SIZE in
   exec-all.h. */
#define TCG_MAX_QEMU_LDST 640

typedef struct TCGLabelQemuLdst {
    int is_ld:1;            /* qemu_ld: 1, qemu_st: 0 */
    int opc:4;
    int addrlo_reg;         /* reg index for low word of guest virtual addr */
    int addrhi_reg;         /* reg index for high word of guest virtual addr */
    int datalo_reg;         /* reg index for low word to be loaded or stored */
    int datahi_reg;         /* reg index for high word to be loaded or stored */
    int mem_index;          /* soft MMU memory index */
    uint8_t *raddr;         /* generated-code address of the next IR after the
                               qemu_ld/st IR */
    uint8_t *label_ptr[2];  /* label pointers to be updated */
} TCGLabelQemuLdst;
#endif

#ifdef CONFIG_DEBUG_TCG
#define DEBUG_TCGV 1
#endif

#ifdef DEBUG_TCGV

typedef struct
{
    int i32;
} TCGv_i32;

typedef struct
{
    int i64;
} TCGv_i64;

typedef struct {
    int iptr;
} TCGv_ptr;

#define MAKE_TCGV_I32(i) __extension__ \
    ({ TCGv_i32 make_tcgv_tmp = {i}; make_tcgv_tmp;})
#define MAKE_TCGV_I64(i) __extension__ \
    ({ TCGv_i64 make_tcgv_tmp = {i}; make_tcgv_tmp;})
#define MAKE_TCGV_PTR(i) __extension__ \
    ({ TCGv_ptr make_tcgv_tmp = {i}; make_tcgv_tmp; })
#define GET_TCGV_I32(t) ((t).i32)
#define GET_TCGV_I64(t) ((t).i64)
#define GET_TCGV_PTR(t) ((t).iptr)
#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) MAKE_TCGV_I32(GET_TCGV_I64(t))
#define TCGV_HIGH(t) MAKE_TCGV_I32(GET_TCGV_I64(t) + 1)
#endif

#else /* !DEBUG_TCGV */

typedef int TCGv_i32;
typedef int TCGv_i64;
#if TCG_TARGET_REG_BITS == 32
#define TCGv_ptr TCGv_i32
#else
#define TCGv_ptr TCGv_i64
#endif
#define MAKE_TCGV_I32(x) (x)
#define MAKE_TCGV_I64(x) (x)
#define MAKE_TCGV_PTR(x) (x)
#define GET_TCGV_I32(t) (t)
#define GET_TCGV_I64(t) (t)
#define GET_TCGV_PTR(t) (t)

#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) (t)
#define TCGV_HIGH(t) ((t) + 1)
#endif

#endif /* DEBUG_TCGV */

#define TCGV_EQUAL_I32(a, b) (GET_TCGV_I32(a) == GET_TCGV_I32(b))
#define TCGV_EQUAL_I64(a, b) (GET_TCGV_I64(a) == GET_TCGV_I64(b))

/* Dummy definition to avoid compiler warnings. */
#define TCGV_UNUSED_I32(x) x = MAKE_TCGV_I32(-1)
#define TCGV_UNUSED_I64(x) x = MAKE_TCGV_I64(-1)

#define TCGV_IS_UNUSED_I32(x) (GET_TCGV_I32(x) == -1)
#define TCGV_IS_UNUSED_I64(x) (GET_TCGV_I64(x) == -1)
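
/* Illustrative sketch (not part of the original header): front ends typically
 * mark an optional TCGv as unused and test for it later, e.g. for a result
 * that only some instruction forms produce (names below are hypothetical):
 *
 *     TCGv_i32 ret_hi;
 *     TCGV_UNUSED_I32(ret_hi);                 // silences "may be used
 *                                              // uninitialized" warnings
 *     if (insn_has_high_result) {
 *         ret_hi = tcg_temp_new_i32();
 *     }
 *     ...
 *     if (!TCGV_IS_UNUSED_I32(ret_hi)) {
 *         tcg_temp_free_i32(ret_hi);
 *     }
 */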

/* call flags */
/* Helper does not read globals (either directly or through an exception). It
   implies TCG_CALL_NO_WRITE_GLOBALS. */
#define TCG_CALL_NO_READ_GLOBALS    0x0010
/* Helper does not write globals */
#define TCG_CALL_NO_WRITE_GLOBALS   0x0020
/* Helper can be safely suppressed if the return value is not used. */
#define TCG_CALL_NO_SIDE_EFFECTS    0x0040

/* convenience version of most used call flags */
#define TCG_CALL_NO_RWG         TCG_CALL_NO_READ_GLOBALS
#define TCG_CALL_NO_WG          TCG_CALL_NO_WRITE_GLOBALS
#define TCG_CALL_NO_SE          TCG_CALL_NO_SIDE_EFFECTS
#define TCG_CALL_NO_RWG_SE      (TCG_CALL_NO_RWG | TCG_CALL_NO_SE)
#define TCG_CALL_NO_WG_SE       (TCG_CALL_NO_WG | TCG_CALL_NO_SE)
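
/* Illustrative sketch (an assumption, not defined in this header): helpers
 * usually pick up these flags through the DEF_HELPER_FLAGS_* macros used in
 * the per-target helper declaration headers, so a pure helper that neither
 * touches globals nor has side effects could be declared roughly like:
 *
 *     DEF_HELPER_FLAGS_2(my_clz, TCG_CALL_NO_RWG_SE, i32, i32, i32)
 *
 * where "my_clz" is a hypothetical helper name; the flag combination tells
 * the optimizer the call may be removed if its result is unused.
 */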

/* used to align parameters */
#define TCG_CALL_DUMMY_TCGV     MAKE_TCGV_I32(-1)
#define TCG_CALL_DUMMY_ARG      ((TCGArg)(-1))

/* Conditions.  Note that these are laid out for easy manipulation by
   the functions below:
     bit 0 is used for inverting;
     bit 1 is signed,
     bit 2 is unsigned,
     bit 3 is used with bit 0 for swapping signed/unsigned. */
typedef enum {
    /* non-signed */
    TCG_COND_NEVER  = 0 | 0 | 0 | 0,
    TCG_COND_ALWAYS = 0 | 0 | 0 | 1,
    TCG_COND_EQ     = 8 | 0 | 0 | 0,
    TCG_COND_NE     = 8 | 0 | 0 | 1,
    /* signed */
    TCG_COND_LT     = 0 | 0 | 2 | 0,
    TCG_COND_GE     = 0 | 0 | 2 | 1,
    TCG_COND_LE     = 8 | 0 | 2 | 0,
    TCG_COND_GT     = 8 | 0 | 2 | 1,
    /* unsigned */
    TCG_COND_LTU    = 0 | 4 | 0 | 0,
    TCG_COND_GEU    = 0 | 4 | 0 | 1,
    TCG_COND_LEU    = 8 | 4 | 0 | 0,
    TCG_COND_GTU    = 8 | 4 | 0 | 1,
} TCGCond;

/* Invert the sense of the comparison. */
static inline TCGCond tcg_invert_cond(TCGCond c)
{
    return (TCGCond)(c ^ 1);
}

/* Swap the operands in a comparison. */
static inline TCGCond tcg_swap_cond(TCGCond c)
{
    return c & 6 ? (TCGCond)(c ^ 9) : c;
}

/* Create an "unsigned" version of a "signed" comparison. */
static inline TCGCond tcg_unsigned_cond(TCGCond c)
{
    return c & 2 ? (TCGCond)(c ^ 6) : c;
}

/* Must a comparison be considered unsigned? */
static inline bool is_unsigned_cond(TCGCond c)
{
    return (c & 4) != 0;
}

/* Create a "high" version of a double-word comparison.
   This removes equality from an LTE or GTE comparison. */
static inline TCGCond tcg_high_cond(TCGCond c)
{
    switch (c) {
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
        return (TCGCond)(c ^ 8);
    default:
        return c;
    }
}
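
/* Illustrative sketch (not part of the original header): the bit layout above
 * makes these helpers pure bit manipulation, for example:
 *
 *     tcg_invert_cond(TCG_COND_LT)   == TCG_COND_GE    // flip bit 0
 *     tcg_swap_cond(TCG_COND_LT)     == TCG_COND_GT    // operands exchanged
 *     tcg_unsigned_cond(TCG_COND_LT) == TCG_COND_LTU   // drop signedness
 *     is_unsigned_cond(TCG_COND_LTU) == true
 *     tcg_high_cond(TCG_COND_GE)     == TCG_COND_GT    // equality handled by
 *                                                      // the low-word compare
 */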

#define TEMP_VAL_DEAD  0
#define TEMP_VAL_REG   1
#define TEMP_VAL_MEM   2
#define TEMP_VAL_CONST 3

/* XXX: optimize memory layout */
typedef struct TCGTemp {
    TCGType base_type;
    TCGType type;
    int val_type;
    int reg;
    tcg_target_long val;
    int mem_reg;
    tcg_target_long mem_offset;
    unsigned int fixed_reg:1;
    unsigned int mem_coherent:1;
    unsigned int mem_allocated:1;
    unsigned int temp_local:1; /* If true, the temp is saved across
                                  basic blocks. Otherwise, it is not
                                  preserved across basic blocks. */
    unsigned int temp_allocated:1; /* never used for code gen */
    /* index of next free temp of same base type, -1 if end */
    int next_free_temp;
    const char *name;
} TCGTemp;

typedef struct TCGHelperInfo {
    tcg_target_ulong func;
    const char *name;
} TCGHelperInfo;

typedef struct TCGContext TCGContext;

struct TCGContext {
    uint8_t *pool_cur, *pool_end;
    TCGPool *pool_first, *pool_current, *pool_first_large;
    TCGLabel *labels;
    int nb_labels;
    int nb_globals;
    int nb_temps;
    /* index of free temps, -1 if none */
    int first_free_temp[TCG_TYPE_COUNT * 2];

    /* goto_tb support */
    uint8_t *code_buf;
    uintptr_t *tb_next;
    uint16_t *tb_next_offset;
    uint16_t *tb_jmp_offset; /* != NULL if USE_DIRECT_JUMP */

    /* liveness analysis */
    uint16_t *op_dead_args; /* for each operation, each bit tells if the
                               corresponding argument is dead */
    uint8_t *op_sync_args;  /* for each operation, each bit tells if the
                               corresponding output argument needs to be
                               synced to memory. */

    /* records which temporary a given register holds.  It does not take
       fixed registers into account */
    int reg_to_temp[TCG_TARGET_NB_REGS];
    TCGRegSet reserved_regs;
    tcg_target_long current_frame_offset;
    tcg_target_long frame_start;
    tcg_target_long frame_end;
    int frame_reg;

    uint8_t *code_ptr;
    TCGTemp temps[TCG_MAX_TEMPS]; /* globals first, temps after */

    TCGHelperInfo *helpers;
    int nb_helpers;
    int allocated_helpers;
    int helpers_sorted;

#ifdef CONFIG_PROFILER
    /* profiling info */
    int64_t tb_count1;
    int64_t tb_count;
    int64_t op_count; /* total insn count */
    int op_count_max; /* max insn per TB */
    int64_t temp_count;
    int temp_count_max;
    int64_t del_op_count;
    int64_t code_in_len;
    int64_t code_out_len;
    int64_t interm_time;
    int64_t code_time;
    int64_t la_time;
    int64_t opt_time;
    int64_t restore_count;
    int64_t restore_time;
#endif

#ifdef CONFIG_DEBUG_TCG
    int temps_in_use;
    int goto_tb_issue_mask;
#endif

    uint16_t gen_opc_buf[OPC_BUF_SIZE];
    TCGArg gen_opparam_buf[OPPARAM_BUF_SIZE];

    uint16_t *gen_opc_ptr;
    TCGArg *gen_opparam_ptr;
    target_ulong gen_opc_pc[OPC_BUF_SIZE];
    uint16_t gen_opc_icount[OPC_BUF_SIZE];
    uint8_t gen_opc_instr_start[OPC_BUF_SIZE];

    /* Code generation */
    int code_gen_max_blocks;
    uint8_t *code_gen_prologue;
    uint8_t *code_gen_buffer;
    size_t code_gen_buffer_size;
    /* threshold to flush the translated code buffer */
    size_t code_gen_buffer_max_size;
    uint8_t *code_gen_ptr;

    TBContext tb_ctx;

#if defined(CONFIG_QEMU_LDST_OPTIMIZATION) && defined(CONFIG_SOFTMMU)
    /* labels info for qemu_ld/st IRs.
       The labels help to generate TLB miss case code at the end of the TB */
    TCGLabelQemuLdst *qemu_ldst_labels;
    int nb_qemu_ldst_labels;
#endif
};

extern TCGContext tcg_ctx;

/* pool based memory allocation */

void *tcg_malloc_internal(TCGContext *s, int size);
void tcg_pool_reset(TCGContext *s);
void tcg_pool_delete(TCGContext *s);

static inline void *tcg_malloc(int size)
{
    TCGContext *s = &tcg_ctx;
    uint8_t *ptr, *ptr_end;
    size = (size + sizeof(long) - 1) & ~(sizeof(long) - 1);
    ptr = s->pool_cur;
    ptr_end = ptr + size;
    if (unlikely(ptr_end > s->pool_end)) {
        return tcg_malloc_internal(&tcg_ctx, size);
    } else {
        s->pool_cur = ptr_end;
        return ptr;
    }
}
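
/* Illustrative sketch (not part of the original header): tcg_malloc() hands
 * out long-aligned chunks from the per-context pool; there is no matching
 * free, the whole pool is recycled by tcg_pool_reset().  A typical use is a
 * short-lived scratch buffer during code generation:
 *
 *     char *buf = tcg_malloc(64);              // valid until tcg_pool_reset()
 *     snprintf(buf, 64, "tmp%d", idx);         // "idx" is a hypothetical index
 */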

void tcg_context_init(TCGContext *s);
void tcg_prologue_init(TCGContext *s);
void tcg_func_start(TCGContext *s);

int tcg_gen_code(TCGContext *s, uint8_t *gen_code_buf);
int tcg_gen_code_search_pc(TCGContext *s, uint8_t *gen_code_buf, long offset);

void tcg_set_frame(TCGContext *s, int reg,
                   tcg_target_long start, tcg_target_long size);

TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name);
TCGv_i32 tcg_global_mem_new_i32(int reg, tcg_target_long offset,
                                const char *name);
TCGv_i32 tcg_temp_new_internal_i32(int temp_local);
static inline TCGv_i32 tcg_temp_new_i32(void)
{
    return tcg_temp_new_internal_i32(0);
}
static inline TCGv_i32 tcg_temp_local_new_i32(void)
{
    return tcg_temp_new_internal_i32(1);
}
void tcg_temp_free_i32(TCGv_i32 arg);
char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg);

TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name);
TCGv_i64 tcg_global_mem_new_i64(int reg, tcg_target_long offset,
                                const char *name);
TCGv_i64 tcg_temp_new_internal_i64(int temp_local);
static inline TCGv_i64 tcg_temp_new_i64(void)
{
    return tcg_temp_new_internal_i64(0);
}
static inline TCGv_i64 tcg_temp_local_new_i64(void)
{
    return tcg_temp_new_internal_i64(1);
}
void tcg_temp_free_i64(TCGv_i64 arg);
char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg);
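
/* Illustrative sketch (not part of the original header): translation code
 * allocates temporaries per TB, operates on them with the tcg_gen_* helpers
 * (declared in tcg-op.h), and frees them again; local temps are preserved
 * across basic-block ends, plain temps are not:
 *
 *     TCGv_i64 t0 = tcg_temp_new_i64();        // dies at end of basic block
 *     TCGv_i64 t1 = tcg_temp_local_new_i64();  // preserved across brcond etc.
 *     ...                                      // tcg_gen_* operations here
 *     tcg_temp_free_i64(t0);
 *     tcg_temp_free_i64(t1);
 */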

#if defined(CONFIG_DEBUG_TCG)
/* If you call tcg_clear_temp_count() at the start of a section of
 * code which is not supposed to leak any TCG temporaries, then
 * calling tcg_check_temp_count() at the end of the section will
 * return 1 if the section did in fact leak a temporary.
 */
void tcg_clear_temp_count(void);
int tcg_check_temp_count(void);
#else
#define tcg_clear_temp_count() do { } while (0)
#define tcg_check_temp_count() 0
#endif
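
/* Illustrative sketch (not part of the original header): the usual pattern
 * wraps the translation of one guest instruction, so a forgotten
 * tcg_temp_free_* is reported close to the code that leaked it:
 *
 *     tcg_clear_temp_count();
 *     disas_insn(env, dc);                     // hypothetical per-target decoder
 *     if (tcg_check_temp_count()) {
 *         fprintf(stderr, "TCG temporary leak before %08x\n", dc->pc);
 *     }
 */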

void tcg_dump_info(FILE *f, fprintf_function cpu_fprintf);

#define TCG_CT_ALIAS  0x80
#define TCG_CT_IALIAS 0x40
#define TCG_CT_REG    0x01
#define TCG_CT_CONST  0x02 /* any constant of register size */

typedef struct TCGArgConstraint {
    uint16_t ct;
    uint8_t alias_index;
    union {
        TCGRegSet regs;
    } u;
} TCGArgConstraint;

#define TCG_MAX_OP_ARGS 16

/* Bits for TCGOpDef->flags, 8 bits available. */
enum {
    /* Instruction defines the end of a basic block. */
    TCG_OPF_BB_END       = 0x01,
    /* Instruction clobbers call registers and potentially updates globals. */
    TCG_OPF_CALL_CLOBBER = 0x02,
    /* Instruction has side effects: it cannot be removed if its outputs
       are not used, and might trigger exceptions. */
    TCG_OPF_SIDE_EFFECTS = 0x04,
    /* Instruction operands are 64-bit (otherwise 32-bit). */
    TCG_OPF_64BIT        = 0x08,
    /* Instruction is optional and not implemented by the host, or insn
       is generic and should not be implemented by the host. */
    TCG_OPF_NOT_PRESENT  = 0x10,
};

typedef struct TCGOpDef {
    const char *name;
    uint8_t nb_oargs, nb_iargs, nb_cargs, nb_args;
    uint8_t flags;
    TCGArgConstraint *args_ct;
    int *sorted_args;
#if defined(CONFIG_DEBUG_TCG)
    int used;
#endif
} TCGOpDef;

extern TCGOpDef tcg_op_defs[];
extern const size_t tcg_op_defs_max;

typedef struct TCGTargetOpDef {
    TCGOpcode op;
    const char *args_ct_str[TCG_MAX_OP_ARGS];
} TCGTargetOpDef;

#define tcg_abort() \
do {\
    fprintf(stderr, "%s:%d: tcg fatal error\n", __FILE__, __LINE__);\
    abort();\
} while (0)

#ifdef CONFIG_DEBUG_TCG
# define tcg_debug_assert(X) do { assert(X); } while (0)
#elif QEMU_GNUC_PREREQ(4, 5)
# define tcg_debug_assert(X) \
    do { if (!(X)) { __builtin_unreachable(); } } while (0)
#else
# define tcg_debug_assert(X) do { (void)(X); } while (0)
#endif

void tcg_add_target_add_op_defs(const TCGTargetOpDef *tdefs);

#if TCG_TARGET_REG_BITS == 32
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I32(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I32(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i32((tcg_target_long)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i32((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i32((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i32())
#define tcg_temp_free_ptr(T) tcg_temp_free_i32(TCGV_PTR_TO_NAT(T))
#else
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I64(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I64(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i64((tcg_target_long)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i64((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i64((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i64())
#define tcg_temp_free_ptr(T) tcg_temp_free_i64(TCGV_PTR_TO_NAT(T))
#endif
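
/* Illustrative sketch (not part of the original header): the _ptr variants
 * follow the same pattern as the _i32/_i64 ones and expand to whichever width
 * matches the host, e.g. materialising the address of a hypothetical host-side
 * structure:
 *
 *     TCGv_ptr p = tcg_const_ptr(&some_host_struct);   // host pointer constant
 *     ...                                              // use p, e.g. in a helper call
 *     tcg_temp_free_ptr(p);
 */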

void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
                   int sizemask, TCGArg ret, int nargs, TCGArg *args);

void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                        int c, int right, int arith);

TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr, TCGArg *args,
                     TCGOpDef *tcg_op_def);

/* only used for debugging purposes */
void tcg_register_helper(void *func, const char *name);
const char *tcg_helper_get_name(TCGContext *s, void *func);
void tcg_dump_ops(TCGContext *s);

void dump_ops(const uint16_t *opc_buf, const TCGArg *opparam_buf);
TCGv_i32 tcg_const_i32(int32_t val);
TCGv_i64 tcg_const_i64(int64_t val);
TCGv_i32 tcg_const_local_i32(int32_t val);
TCGv_i64 tcg_const_local_i64(int64_t val);

/**
 * tcg_qemu_tb_exec:
 * @env: CPUArchState * for the CPU
 * @tb_ptr: address of generated code for the TB to execute
 *
 * Start executing code from a given translation block.
 * Where translation blocks have been linked, execution
 * may proceed from the given TB into successive ones.
 * Control eventually returns only when some action is needed
 * from the top-level loop: either control must pass to a TB
 * which has not yet been directly linked, or an asynchronous
 * event such as an interrupt needs handling.
 *
 * The return value is a pointer to the next TB to execute
 * (if known; otherwise zero). This pointer is assumed to be
 * 4-aligned, and the bottom two bits are used to return further
 * information:
 *  0, 1: the link between this TB and the next is via the specified
 *        TB index (0 or 1). That is, we left the TB via (the equivalent
 *        of) "goto_tb <index>". The main loop uses this to determine
 *        how to link the TB just executed to the next.
 *  2:    we are using instruction counting code generation, and we
 *        did not start executing this TB because the instruction counter
 *        would hit zero midway through it. In this case the next-TB pointer
 *        returned is the TB we were about to execute, and the caller must
 *        arrange to execute the remaining count of instructions.
 *  3:    we stopped because the CPU's exit_request flag was set
 *        (usually meaning that there is an interrupt that needs to be
 *        handled). The next-TB pointer returned is the TB we were
 *        about to execute when we noticed the pending exit request.
 *
 * If the bottom two bits indicate an exit-via-index then the CPU
 * state is correctly synchronised and ready for execution of the next
 * TB (and in particular the guest PC is the address to execute next).
 * Otherwise, we gave up on execution of this TB before it started, and
 * the caller must fix up the CPU state by calling cpu_pc_from_tb()
 * with the next-TB pointer we return.
 *
 * Note that TCG targets may use a different definition of tcg_qemu_tb_exec
 * to this default (which just calls the prologue code emitted by
 * tcg_target_qemu_prologue()).
 */
#define TB_EXIT_MASK 3
#define TB_EXIT_IDX0 0
#define TB_EXIT_IDX1 1
#define TB_EXIT_ICOUNT_EXPIRED 2
#define TB_EXIT_REQUESTED 3

#if !defined(tcg_qemu_tb_exec)
# define tcg_qemu_tb_exec(env, tb_ptr) \
    ((uintptr_t (*)(void *, void *))tcg_ctx.code_gen_prologue)(env, tb_ptr)
#endif
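
/* Illustrative sketch (not part of the original header): a simplified
 * cpu-exec style dispatch that separates the next-TB pointer from the
 * low-bit exit code; "env", "tb" and the cpu_pc_from_tb() call follow the
 * description above and are otherwise hypothetical glue:
 *
 *     uintptr_t next_tb = tcg_qemu_tb_exec(env, tb->tc_ptr);
 *     switch (next_tb & TB_EXIT_MASK) {
 *     case TB_EXIT_IDX0:
 *     case TB_EXIT_IDX1:
 *         // TB was left via goto_tb <index>; candidate for direct linking
 *         break;
 *     case TB_EXIT_ICOUNT_EXPIRED:
 *     case TB_EXIT_REQUESTED:
 *         // TB did not run: fix up state from the returned pointer
 *         cpu_pc_from_tb(env, (TranslationBlock *)(next_tb & ~TB_EXIT_MASK));
 *         break;
 *     }
 */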

void tcg_register_jit(void *buf, size_t buf_size);

#if defined(CONFIG_QEMU_LDST_OPTIMIZATION) && defined(CONFIG_SOFTMMU)
/* Generate TB finalization at the end of block */
void tcg_out_tb_finalize(TCGContext *s);
#endif