]> git.proxmox.com Git - qemu.git/blob - tcg/x86_64/tcg-target.c
168faabeece82cc65c645816366e81e788ed0b56
[qemu.git] / tcg / x86_64 / tcg-target.c
1 /*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
/* Host register names indexed by TCG register number; used only for
   debug output. */
const char *tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
42
/* Register allocation preference order: call-clobbered registers first
   (argument registers, then scratch), callee-saved registers last so
   they are only taken when values must survive calls. */
int tcg_target_reg_alloc_order[] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_RAX,
    TCG_REG_R10,
    TCG_REG_R11,

    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};
61
/* Integer argument registers, in order, for host function calls
   (System V AMD64 calling convention). */
const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
70
/* Registers holding the (up to two) integer return values of a call. */
const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX
};
75
76 static uint8_t *tb_ret_addr;
77
78 static void patch_reloc(uint8_t *code_ptr, int type,
79 tcg_target_long value, tcg_target_long addend)
80 {
81 value += addend;
82 switch(type) {
83 case R_X86_64_32:
84 if (value != (uint32_t)value)
85 tcg_abort();
86 *(uint32_t *)code_ptr = value;
87 break;
88 case R_X86_64_32S:
89 if (value != (int32_t)value)
90 tcg_abort();
91 *(uint32_t *)code_ptr = value;
92 break;
93 case R_386_PC32:
94 value -= (long)code_ptr;
95 if (value != (int32_t)value)
96 tcg_abort();
97 *(uint32_t *)code_ptr = value;
98 break;
99 default:
100 tcg_abort();
101 }
102 }
103
/* Maximum number of registers used for input function arguments.
   The System V AMD64 ABI always provides six integer argument
   registers, independent of the call flags. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
109
110 /* parse target specific constraints */
111 int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
112 {
113 const char *ct_str;
114
115 ct_str = *pct_str;
116 switch(ct_str[0]) {
117 case 'a':
118 ct->ct |= TCG_CT_REG;
119 tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
120 break;
121 case 'b':
122 ct->ct |= TCG_CT_REG;
123 tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
124 break;
125 case 'c':
126 ct->ct |= TCG_CT_REG;
127 tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
128 break;
129 case 'd':
130 ct->ct |= TCG_CT_REG;
131 tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
132 break;
133 case 'S':
134 ct->ct |= TCG_CT_REG;
135 tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
136 break;
137 case 'D':
138 ct->ct |= TCG_CT_REG;
139 tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
140 break;
141 case 'q':
142 ct->ct |= TCG_CT_REG;
143 tcg_regset_set32(ct->u.regs, 0, 0xf);
144 break;
145 case 'r':
146 ct->ct |= TCG_CT_REG;
147 tcg_regset_set32(ct->u.regs, 0, 0xffff);
148 break;
149 case 'L': /* qemu_ld/st constraint */
150 ct->ct |= TCG_CT_REG;
151 tcg_regset_set32(ct->u.regs, 0, 0xffff);
152 tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
153 tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
154 break;
155 case 'e':
156 ct->ct |= TCG_CT_CONST_S32;
157 break;
158 case 'Z':
159 ct->ct |= TCG_CT_CONST_U32;
160 break;
161 default:
162 return -1;
163 }
164 ct_str++;
165 *pct_str = ct_str;
166 return 0;
167 }
168
169 /* test if a constant matches the constraint */
170 static inline int tcg_target_const_match(tcg_target_long val,
171 const TCGArgConstraint *arg_ct)
172 {
173 int ct;
174 ct = arg_ct->ct;
175 if (ct & TCG_CT_CONST)
176 return 1;
177 else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
178 return 1;
179 else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
180 return 1;
181 else
182 return 0;
183 }
184
/* Group-1 ALU operations: the value is the /r opcode-extension field
   (and the operation index within the 0x00..0x3f opcode block). */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* Shift-group operations: /r opcode-extension field for 0xc1/0xd1/0xd3. */
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* Condition codes for Jcc (0x70+cc / 0x0f 0x80+cc).
   JCC_JMP denotes an unconditional jump. */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

/* Flags or'ed into the opcode argument of tcg_out_opc() (the real
   opcode byte occupies the low 8 bits). */
#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REXB  0x400 /* force rex use for byte registers */
219
/* Map a TCG comparison condition to the x86 condition code used by
   Jcc: signed conditions use JL/JGE/JLE/JG, unsigned use JB/JAE/JBE/JA. */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
232
/* Emit an opcode byte, preceded as needed by a REX prefix and the 0x0f
   escape byte (P_EXT).  The REX bits come from: W = P_REXW flag in opc
   (bit 9 -> bit 3), R = bit 3 of r, X = bit 3 of x, B = bit 3 of rm.
   P_REXB forces an (otherwise empty) REX prefix, per the declared
   purpose of that flag for byte-register operands. */
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) |
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc);
}
245
/* Emit opcode plus a register-direct ModRM byte (mod = 11b):
   reg field = r, r/m field = rm. */
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
251
/* Emit opcode plus ModRM/SIB/displacement for a memory operand.

   rm >= 0: base register rm with displacement `offset`, using the
   shortest encoding (no disp, disp8, or disp32) and inserting the SIB
   byte (0x24) that the encoding requires when the base is RSP-encoded.

   rm < 0: no base register; (-rm - 1) is the number of immediate bytes
   that will follow this operand, which is needed to compute the
   rip-relative displacement.  Falls back to 32-bit absolute addressing
   through a SIB byte when the target is out of rip-relative range, and
   aborts if neither form can encode the address. */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        /* 5 = ModRM byte + 4-byte displacement already counted from here. */
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            /* absolute 32-bit address via SIB with no base/index */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        /* mod = 00b: no displacement.  RBP-encoded bases cannot use this
           form (that encoding means rip-relative/disp32 instead). */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        /* mod = 01b: 8-bit displacement. */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        /* mod = 10b: 32-bit displacement. */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
299
#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
/* Emit opcode plus ModRM/SIB/displacement for a base + (index << shift)
   + offset memory operand.  index == -1 means no index register; a base
   register (rm >= 0) is mandatory. */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;
    if (rm == -1)
        tcg_abort();
    /* Pick the shortest displacement form (see tcg_out_modrm_offset). */
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40;
    } else if (offset == (int32_t)offset) {
        mod = 0x80;
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* RSP-encoded base needs a SIB byte. */
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x04 | (rm & 7));
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        /* SIB byte encodes scale (shift), index and base. */
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif
338
/* Emit a 64-bit register-to-register move: movq arg, ret. */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
343
/* Load constant arg into register ret, choosing the shortest encoding:
   xor for zero, 32-bit movl when the value zero-extends (or the TCG
   type is 32 bit), movq with a sign-extended imm32, else a full
   64-bit immediate move. */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        /* movabs: two 32-bit halves, low then high. */
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
361
362 static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
363 int arg1, tcg_target_long arg2)
364 {
365 if (type == TCG_TYPE_I32)
366 tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2); /* movl */
367 else
368 tcg_out_modrm_offset(s, 0x8b | P_REXW, ret, arg1, arg2); /* movq */
369 }
370
371 static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
372 int arg1, tcg_target_long arg2)
373 {
374 if (type == TCG_TYPE_I32)
375 tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2); /* movl */
376 else
377 tcg_out_modrm_offset(s, 0x89 | P_REXW, arg, arg1, arg2); /* movq */
378 }
379
/* Emit a 32-bit group-1 ALU op with an immediate: "op $val, r0".
   Uses the 0x83 /c imm8 form when val fits a signed byte, else the
   0x81 /c imm32 form. */
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
390
/* Emit a 64-bit group-1 ALU op with an immediate: "op $val, r0".
   Immediates are limited to 32 bits (sign-extended); as a special case
   an AND with a zero-extending 32-bit mask is emitted as the 32-bit
   form, relying on the implicit zero-extension of 32-bit results.
   Aborts on any other 64-bit immediate. */
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (val == (int32_t)val) {
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
406
407 void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
408 {
409 if (val != 0)
410 tgen_arithi64(s, ARITH_ADD, reg, val);
411 }
412
/* Emit a jump (opc == JCC_JMP, i.e. -1) or conditional jump (opc is a
   JCC_* code) to a TCG label.  Resolved labels use the short 2-byte
   form when the displacement fits a signed byte, else the 5/6-byte
   near form; unresolved labels always get the near form plus a
   PC-relative relocation to be patched later. */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;   /* displacement relative to end of 2-byte insn */
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);          /* jmp rel8 */
            else
                tcg_out8(s, 0x70 + opc);    /* jcc rel8 */
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);          /* jmp rel32: 5 bytes total */
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);          /* jcc rel32: 6 bytes total */
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        /* Displacement is relative to the end of the instruction,
           hence the -4 addend for the 4-byte field. */
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
448
/* Compare arg1 with arg2 (register or constant, per const_arg2) and
   emit a conditional branch to label_index.  rexw selects 32- vs
   64-bit comparison; comparison against constant 0 is done with the
   shorter "test r, r". */
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
        } else {
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
        }
    } else {
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
468
#if defined(CONFIG_SOFTMMU)
/* Slow-path MMU helpers, called on a TLB miss; indexed by log2 of the
   access size (byte/word/long/quad). */
extern void __ldb_mmu(void);
extern void __ldw_mmu(void);
extern void __ldl_mmu(void);
extern void __ldq_mmu(void);

extern void __stb_mmu(void);
extern void __stw_mmu(void);
extern void __stl_mmu(void);
extern void __stq_mmu(void);


static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
495
/* Emit code for a qemu_ld opcode.  args = (data_reg, addr_reg,
   mem_index); opc bits 0-1 give log2 of the access size, bit 2 set
   means sign-extend the loaded value.  With CONFIG_SOFTMMU this emits
   an inline TLB lookup against addr_read: on a hit the host address is
   the guest address plus the TLB entry's addend; on a miss the
   __ld*_mmu helper is called with (addr, mem_index) in %rdi/%rsi. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;           /* log2 of the access size */

    /* Scratch registers; the 'L' constraint keeps operands out of them. */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* r1 = TLB index bits of the address. */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    /* r0 = page-aligned address, keeping low bits that would make the
       access cross into the next page (forces a slow-path miss). */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* r1 = &env->tlb_table[mem_index][index].addr_read */
    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* Reload the untruncated address into r0 for both paths. */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 (TLB hit) */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;              /* rel8 patched at label1 below */

    /* TLB miss: call the helper.  Address is already in %rdi. */
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out8(s, 0xe8);          /* call rel32 */
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    /* Move/extend the helper's result from %rax into data_reg. */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
    case 1:
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 (skip the fast path) */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: fast path, patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* r0 = host address = guest address + TLB addend */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* The actual load from the host address in r0, with byte swapping
       when guest and host endianness differ. */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, 0);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* Swap first, then sign-extend the swapped 16-bit value. */
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, 0);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, 0);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the jmp displacement of the slow path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
672
/* Emit code for a qemu_st opcode.  args = (data_reg, addr_reg,
   mem_index); opc is log2 of the access size.  Mirrors
   tcg_out_qemu_ld but checks addr_write and calls the __st*_mmu
   helpers with (addr, value, mem_index) in %rdi/%rsi/%rdx on a miss. */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;               /* log2 of the access size */

    /* Scratch registers; the 'L' constraint keeps operands out of them. */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* r1 = TLB index bits of the address. */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    /* r0 = page-aligned address (low bits force a miss when the access
       would cross a page boundary). */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* r1 = &env->tlb_table[mem_index][index].addr_write */
    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* Reload the untruncated address into r0 for both paths. */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 (TLB hit) */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;              /* rel8 patched at label1 below */

    /* TLB miss: value (zero-extended to its size) goes in %rsi. */
    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out8(s, 0xe8);          /* call rel32 */
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    /* jmp label2 (skip the fast path) */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: fast path, patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* r0 = host address = guest address + TLB addend */
    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* The actual store to the host address in r0; when byte swapping
       is needed the value is first swapped into scratch r1. */
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, 0);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, 0);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the jmp displacement of the slow path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
816
/* Emit host code for one TCG opcode.  args holds the operands as laid
   out by the constraint table (x86_64_op_defs); const_args flags which
   of them are constants. */
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch(opc) {
    case INDEX_op_exit_tb:
        /* Return value in %rax, then jump to the common epilogue. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method: record the offset of the 32-bit
               displacement so it can be patched when TBs are chained */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next +
                                                   args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8);      /* call rel32 */
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 2, args[0]);  /* call *reg */
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9);      /* jmp rel32 */
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 4, args[0]);  /* jmp *reg */
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i64:
        /* movsbq */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i64:
        /* movswq */
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        /* movslq */
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
        break;

    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
        break;

    /* Two-operand ALU ops; the destination (args[0]) is also the first
       source, per the "0" constraint. */
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith32;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith32;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith32;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith32:
        if (const_args[2]) {
            tgen_arithi32(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;

    case INDEX_op_sub_i64:
        c = ARITH_SUB;
        goto gen_arith64;
    case INDEX_op_and_i64:
        c = ARITH_AND;
        goto gen_arith64;
    case INDEX_op_or_i64:
        c = ARITH_OR;
        goto gen_arith64;
    case INDEX_op_xor_i64:
        c = ARITH_XOR;
        goto gen_arith64;
    case INDEX_op_add_i64:
        c = ARITH_ADD;
    gen_arith64:
        if (const_args[2]) {
            tgen_arithi64(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
        }
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b, args[0], args[0]);  /* imul imm8 */
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69, args[0], args[0]);  /* imul imm32 */
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);  /* imul r, r */
        }
        break;
    case INDEX_op_mul_i64:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
        }
        break;
    /* div2/divu2: dividend fixed in %eax:%edx, divisor in args[4]. */
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);  /* idiv */
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);  /* div */
        break;
    case INDEX_op_div2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
        break;
    case INDEX_op_divu2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
        break;

    /* Shifts: count either constant or in %cl (the "c" constraint). */
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);    /* shift by 1 */
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);    /* shift by imm8 */
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);        /* shift by %cl */
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;

    case INDEX_op_shl_i64:
        c = SHIFT_SHL;
    gen_shift64:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
        }
        break;
    case INDEX_op_shr_i64:
        c = SHIFT_SHR;
        goto gen_shift64;
    case INDEX_op_sar_i64:
        c = SHIFT_SAR;
        goto gen_shift64;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], P_REXW);
        break;

    case INDEX_op_bswap_i32:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
        break;
    case INDEX_op_bswap_i64:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
        break;

    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]);  /* neg */
        break;
    case INDEX_op_neg_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
        break;

    /* Guest memory accesses: low bits of the second argument of
       tcg_out_qemu_ld select size, bit 2 sign-extension. */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
1120
/* Callee-saved registers pushed/popped by the TB prologue/epilogue. */
static int tcg_target_callee_save_regs[] = {
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    /* TCG_REG_R14, */ /* currently used for the global env, so no
                          need to save */
    TCG_REG_R15,
};
1130
/* pushq reg (REX.B emitted by tcg_out_opc for r8-r15). */
static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x50 + (reg & 7)), 0, reg, 0);
}
1135
/* popq reg (REX.B emitted by tcg_out_opc for r8-r15). */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x58 + (reg & 7)), 0, reg, 0);
}
1140
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);

    }
    /* reserve some stack space */
    /* push_size: return address (8) plus the registers pushed above;
       the total frame is then rounded up to the stack alignment. */
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);

    /* Enter the translated block: its address is the first argument. */
    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */

    /* TB epilogue */
    /* exit_tb jumps here (see tb_ret_addr uses in tcg_out_op). */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
1170
/* Operand constraints per opcode.  "r" = any register, "q" = low four
   registers, "a"/"b"/"c"/"d"/"S"/"D" = the named fixed register,
   "0"/"1" = alias of output operand 0/1, "i" = any immediate,
   "e" = sign-extended imm32, "Z" = zero-extended imm32,
   "L" = register usable by qemu_ld/st (excludes %rsi/%rdi).
   Terminated by { -1 }. */
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap_i32, { "r", "0" } },
    { INDEX_op_bswap_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    { -1 },
};
1254
/* One-time backend initialization: register availability, the
   call-clobbered set (System V AMD64 caller-saved registers), reserved
   registers, and the operand-constraint table. */
void tcg_target_init(TCGContext *s)
{
    /* fail safe */
    /* The TLB-lookup code computes entry offsets by shifting with
       CPU_TLB_ENTRY_BITS, so the entry size must match exactly. */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_RDI) |
                     (1 << TCG_REG_RSI) |
                     (1 << TCG_REG_RDX) |
                     (1 << TCG_REG_RCX) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_RAX) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11));

    tcg_regset_clear(s->reserved_regs);
    /* %rsp is the host stack pointer and is never allocated. */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}