/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL, OP_ORL, OP_ADCL, OP_SBBL, OP_ANDL, OP_SUBL, OP_XORL, OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL, OP_ROR, OP_RCL, OP_RCR, OP_SHL, OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0, OT_WORD, OT_LONG, OT_QUAD,
};

enum {
    /* i386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX, OR_EDX, OR_EBX, OR_ESP, OR_EBP, OR_ESI, OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
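/* Helpers that take a register index also accept the OR_TMP0/OR_TMP1
   pseudo registers: by convention, passing OR_TMP0 as an operand means
   "memory operand, address already computed into A0" (see gen_op()
   and gen_ldst_modrm() below). */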
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
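/* The REG_*_OFFSET values locate a sub-register inside a target_ulong
   sized CPU register slot: on a little-endian host the low byte/word/
   dword live at offset 0, while on a big-endian host they live at the
   end of the slot. REG_H_OFFSET addresses the legacy high byte
   (AH..BH) and REG_LH_OFFSET the high 32 bits of a 64-bit register. */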
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env,
                           offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env,
                           offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env,
                            offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env,
                     offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env,
                    offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env,
                     offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* maps an OP_* arith/logic op to the cc_op that describes its flags */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

/* entry 6 (OP_SHL1, the undocumented SAL encoding) intentionally
   repeats the shl helper */

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
                   gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
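/* In the load/store helpers, idx packs the operand size and the memory
   access index together: the low 2 bits hold the OT_* size and the
   upper bits select the access functions (raw/kernel/user), so
   (idx >> 2) - 1 recovers the memory index used by the qemu_ld/st ops.
   Example: gen_op_ld_T0_A0(OT_LONG + s->mem_index) emits a 32-bit load
   from the address in A0 with the current privilege's access mode. */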
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
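/* All the string helpers below follow the same pattern: compute the
   source and/or destination address, perform the access, load the
   direction increment (+/- operand size, depending on EFLAGS.DF) with
   gen_op_movl_T0_Dshift, then advance ESI/EDI using the add helper
   that matches the current address size. */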
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* dummy store first: take any write page fault before doing the
       I/O access, so the instruction stays restartable */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
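/* The generated code for a REP-prefixed instruction is therefore:
 *     if (ECX == 0) goto next_insn;
 *     <one iteration of the string op>
 *     ECX--;
 *     (REPZ/REPNZ only: exit the loop on a ZF mismatch)
 *     goto current_insn;   -- loop by jumping back to the insn itself
 * Jumping back to the current instruction instead of looping in place
 * keeps single stepping and interrupt delivery precise, one iteration
 * at a time. */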
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL, gen_op_jb_subb, gen_op_jz_subb, gen_op_jbe_subb,
        gen_op_js_subb, NULL, gen_op_jl_subb, gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL, gen_op_jb_subw, gen_op_jz_subw, gen_op_jbe_subw,
        gen_op_js_subw, NULL, gen_op_jl_subw, gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL, gen_op_jb_subl, gen_op_jz_subl, gen_op_jbe_subl,
        gen_op_js_subl, NULL, gen_op_jl_subl, gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update_inc_cc();
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0)
                    gen_op_addq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5)
                s->pc += 4;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6)
                s->pc += 2;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;

    must_add_seg = s->addseg;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    } else {
        override = R_DS;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
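/* Direct block chaining: tcg_gen_goto_tb emits a patchable jump slot
   and tcg_gen_exit_tb returns (tb | tb_num) to the execution loop,
   which patches the slot once the destination TB is known, so the two
   TBs are linked without going back through the dispatcher. Chaining
   is only done when the destination stays on a guest page already
   covered by this TB, hence the TARGET_PAGE_MASK checks above. */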
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB ... CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB ... CC_OP_ADDQ:
        case CC_OP_ADCB ... CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB ... CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB ... CC_OP_ADDQ:
    case CC_OP_ADCB ... CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State, segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)

static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if (s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
           so we know if this is an EOB or not ... let's assume it's not
           for now */
    }
#endif
    return 0;
}
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if (!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
    case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        SVM_movq_T1_im(param);
        gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
        /* this is a special case as we do not know if the interception occurs
           so we assume there was none */
        return 0;
    case SVM_EXIT_MSR:
        if (s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        }
        break;
    default:
        if (s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_EFLAGS;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_svm_vmexit(type >> 32, type);
            /* we can optimize this one so TBs don't get longer
               than up to vmexit */
            gen_eob(s);
            return 1;
        }
        break;
    }
    return 0;
}
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T1_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;

    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;

    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
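/* gen_eob terminates the TB without chaining: control always returns
   to the execution loop (exit_tb(0)), so pending interrupts, the
   single-step trap and the IRQ-inhibit flag left by the instruction
   just executed are all re-examined before the next instruction. */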
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
}
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
}
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1, 0);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
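/* sse_op_table1 is indexed by the second opcode byte and by b1, which
   encodes the mandatory prefix: 0 = none (ps/MMX form), 1 = 0x66 (pd),
   2 = 0xF3 (ss), 3 = 0xF2 (sd) -- see the prefix decoding at the top
   of gen_sse(). SSE_SPECIAL marks opcodes handled out of line in
   gen_sse() and SSE_DUMMY marks entries with no table helper. */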
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
/* indexed by the cmpps/cmppd immediate predicate */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
/* 3DNow! ops, indexed by the instruction's suffix byte */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2640 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2642 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2643 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2647 if (s
->prefix
& PREFIX_DATA
)
2649 else if (s
->prefix
& PREFIX_REPZ
)
2651 else if (s
->prefix
& PREFIX_REPNZ
)
2655 sse_op2
= sse_op_table1
[b
][b1
];
2658 if ((b
<= 0x5f && b
>= 0x10) || b
== 0xc6 || b
== 0xc2) {
2668 /* simple MMX/SSE operation */
2669 if (s
->flags
& HF_TS_MASK
) {
2670 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2673 if (s
->flags
& HF_EM_MASK
) {
2675 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2678 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2681 if (!(s
->cpuid_ext2_features
& CPUID_EXT2_3DNOW
))
2684 tcg_gen_helper_0_0(helper_emms
);
2689 tcg_gen_helper_0_0(helper_emms
);
2692 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2693 the static cpu state) */
2695 tcg_gen_helper_0_0(helper_enter_mmx
);
2698 modrm
= ldub_code(s
->pc
++);
2699 reg
= ((modrm
>> 3) & 7);
2702 mod
= (modrm
>> 6) & 3;
2703 if (sse_op2
== SSE_SPECIAL
) {
2706 case 0x0e7: /* movntq */
2709 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2710 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2712 case 0x1e7: /* movntdq */
2713 case 0x02b: /* movntps */
2714 case 0x12b: /* movntps */
2715 case 0x3f0: /* lddqu */
2718 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2719 gen_sto_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
2721 case 0x6e: /* movd mm, ea */
2722 #ifdef TARGET_X86_64
2723 if (s
->dflag
== 2) {
2724 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
2725 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2729 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2730 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
2731 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2732 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx
, cpu_ptr0
, cpu_T
[0]);
2735 case 0x16e: /* movd xmm, ea */
2736 #ifdef TARGET_X86_64
2737 if (s
->dflag
== 2) {
2738 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
2739 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
2740 offsetof(CPUX86State
,xmm_regs
[reg
]));
2741 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm
, cpu_ptr0
, cpu_T
[0]);
2745 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2746 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
                             offsetof(CPUX86State,xmm_regs[reg]));
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
        tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
        }
        break;
    case 0x6f: /* movq mm, ea */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
        } else {
            rm = (modrm & 7);
            tcg_gen_ld_i64(cpu_tmp1, cpu_env,
                           offsetof(CPUX86State,fpregs[rm].mmx));
            tcg_gen_st_i64(cpu_tmp1, cpu_env,
                           offsetof(CPUX86State,fpregs[reg].mmx));
        }
        break;
    case 0x010: /* movups */
    case 0x110: /* movupd */
    case 0x028: /* movaps */
    case 0x128: /* movapd */
    case 0x16f: /* movdqa xmm, ea */
    case 0x26f: /* movdqu xmm, ea */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                        offsetof(CPUX86State,xmm_regs[rm]));
        }
        break;
    case 0x210: /* movss xmm, ea */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(OT_LONG + s->mem_index);
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl_T0_0();
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
        }
        break;
    case 0x310: /* movsd xmm, ea */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            gen_op_movl_T0_0();
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        }
        break;
    case 0x012: /* movlps */
    case 0x112: /* movlpd */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        } else {
            /* movhlps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
        }
        break;
    case 0x212: /* movsldup */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
        }
        gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
        gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
        break;
    case 0x312: /* movddup */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        }
        gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        break;
    case 0x016: /* movhps */
    case 0x116: /* movhpd */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
        } else {
            /* movlhps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        }
        break;
    case 0x216: /* movshdup */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
        }
        gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
        gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
        break;
    case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
        if (s->dflag == 2) {
            tcg_gen_ld_i64(cpu_T[0], cpu_env,
                           offsetof(CPUX86State,fpregs[reg].mmx));
            gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
        } else
#endif
        {
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                             offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
        }
        break;
    case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
        if (s->dflag == 2) {
            tcg_gen_ld_i64(cpu_T[0], cpu_env,
                           offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
        } else
#endif
        {
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                             offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
        }
        break;
    case 0x27e: /* movq xmm, ea */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        }
        gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
        break;
    case 0x7f: /* movq ea, mm */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
        } else {
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                        offsetof(CPUX86State,fpregs[reg].mmx));
        }
        break;
    case 0x011: /* movups */
    case 0x111: /* movupd */
    case 0x029: /* movaps */
    case 0x129: /* movapd */
    case 0x17f: /* movdqa ea, xmm */
    case 0x27f: /* movdqu ea, xmm */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                        offsetof(CPUX86State,xmm_regs[reg]));
        }
        break;
    case 0x211: /* movss ea, xmm */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_st_T0_A0(OT_LONG + s->mem_index);
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
        }
        break;
    case 0x311: /* movsd ea, xmm */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        }
        break;
    case 0x013: /* movlps */
    case 0x113: /* movlpd */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        } else {
            goto illegal_op;
        }
        break;
    case 0x017: /* movhps */
    case 0x117: /* movhpd */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
        } else {
            goto illegal_op;
        }
        break;
    case 0x71: /* shift mm, im */
    case 0x72:
    case 0x73:
    case 0x171: /* shift xmm, im */
    case 0x172:
    case 0x173:
        val = ldub_code(s->pc++);
        if (is_xmm) {
            gen_op_movl_T0_im(val);
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
            gen_op_movl_T0_0();
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
            op1_offset = offsetof(CPUX86State,xmm_t0);
        } else {
            gen_op_movl_T0_im(val);
            gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
            gen_op_movl_T0_0();
            gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
            op1_offset = offsetof(CPUX86State,mmx_t0);
        }
        sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
        if (!sse_op2)
            goto illegal_op;
        if (is_xmm) {
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        } else {
            rm = (modrm & 7);
            op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
        }
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
        tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
        break;
    case 0x050: /* movmskps */
        rm = (modrm & 7) | REX_B(s);
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                         offsetof(CPUX86State,xmm_regs[rm]));
        tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
        gen_op_mov_reg_T0(OT_LONG, reg);
        break;
    case 0x150: /* movmskpd */
        rm = (modrm & 7) | REX_B(s);
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                         offsetof(CPUX86State,xmm_regs[rm]));
        tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
        gen_op_mov_reg_T0(OT_LONG, reg);
        break;
    case 0x02a: /* cvtpi2ps */
    case 0x12a: /* cvtpi2pd */
        tcg_gen_helper_0_0(helper_enter_mmx);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,mmx_t0);
            gen_ldq_env_A0(s->mem_index, op2_offset);
        } else {
            rm = (modrm & 7);
            op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
        }
        op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
        switch(b >> 8) {
        case 0x0:
            tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
            break;
        default:
        case 0x1:
            tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
            break;
        }
        break;
    case 0x22a: /* cvtsi2ss */
    case 0x32a: /* cvtsi2sd */
        ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
        tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
        break;
    case 0x02c: /* cvttps2pi */
    case 0x12c: /* cvttpd2pi */
    case 0x02d: /* cvtps2pi */
    case 0x12d: /* cvtpd2pi */
        tcg_gen_helper_0_0(helper_enter_mmx);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,xmm_t0);
            gen_ldo_env_A0(s->mem_index, op2_offset);
        } else {
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        }
        op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
        switch(b) {
        case 0x02c:
            tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
            break;
        case 0x12c:
            tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
            break;
        case 0x02d:
            tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
            break;
        case 0x12d:
            tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
            break;
        }
        break;
    case 0x22c: /* cvttss2si */
    case 0x32c: /* cvttsd2si */
    case 0x22d: /* cvtss2si */
    case 0x32d: /* cvtsd2si */
        ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if ((b >> 8) & 1) {
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
            } else {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
            }
            op2_offset = offsetof(CPUX86State,xmm_t0);
        } else {
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        }
        sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                (b & 1) * 4];
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
        if (ot == OT_LONG) {
            tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
        } else {
            tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
        }
        gen_op_mov_reg_T0(ot, reg);
        break;
    case 0xc4: /* pinsrw */
    case 0x1c4:
        s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        val = ldub_code(s->pc++);
        if (b1) {
            val &= 7;
            tcg_gen_st16_tl(cpu_T[0], cpu_env,
                            offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
        } else {
            val &= 3;
            tcg_gen_st16_tl(cpu_T[0], cpu_env,
                            offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
        }
        break;
    case 0xc5: /* pextrw */
    case 0x1c5:
        if (mod != 3)
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (b1) {
            val &= 7;
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
        } else {
            val &= 3;
            rm = (modrm & 7);
            tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                             offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
        }
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_op_mov_reg_T0(OT_LONG, reg);
        break;
    case 0x1d6: /* movq ea, xmm */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
        }
        break;
    case 0x2d6: /* movq2dq */
        tcg_gen_helper_0_0(helper_enter_mmx);
        rm = (modrm & 7);
        gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                    offsetof(CPUX86State,fpregs[rm].mmx));
        gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
        break;
    case 0x3d6: /* movdq2q */
        tcg_gen_helper_0_0(helper_enter_mmx);
        rm = (modrm & 7) | REX_B(s);
        gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                    offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        break;
    case 0xd7: /* pmovmskb */
    case 0x1d7:
        if (mod != 3)
            goto illegal_op;
        if (b1) {
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
        } else {
            rm = (modrm & 7);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
            tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
        }
        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_op_mov_reg_T0(OT_LONG, reg);
        break;
    default:
        goto illegal_op;
    }
} else {
    /* generic MMX or SSE operation */
    switch(b) {
    case 0xf7:
        /* maskmov : we must prepare A0 */
        if (mod != 3)
            goto illegal_op;
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EDI);
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EDI);
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        break;
    case 0x70: /* pshufx insn */
    case 0xc6: /* pshufx insn */
    case 0xc2: /* compare insns */
        s->rip_offset = 1;
        break;
    default:
        break;
    }
    if (is_xmm) {
        op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,xmm_t0);
            if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                            b == 0xc2)) {
                /* specific case for SSE single instructions */
                if (b1 == 2) {
                    /* 32 bit access */
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                } else {
                    /* 64 bit access */
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                }
            } else {
                gen_ldo_env_A0(s->mem_index, op2_offset);
            }
        } else {
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        }
    } else {
        op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,mmx_t0);
            gen_ldq_env_A0(s->mem_index, op2_offset);
        } else {
            rm = (modrm & 7);
            op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
        }
    }
    switch(b) {
    case 0x0f: /* 3DNow! data insns */
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        val = ldub_code(s->pc++);
        sse_op2 = sse_op_table5[val];
        if (!sse_op2)
            goto illegal_op;
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
        tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
        break;
    case 0x70: /* pshufx insn */
    case 0xc6: /* pshufx insn */
        val = ldub_code(s->pc++);
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
        tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
        break;
    case 0xc2:
        /* compare insns */
        val = ldub_code(s->pc++);
        if (val >= 8)
            goto illegal_op;
        sse_op2 = sse_op_table4[val][b1];
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
        tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
        break;
    default:
        tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
        tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
        tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
        break;
    }
    if (b == 0x2e || b == 0x2f) {
        /* just to keep the EFLAGS optimization correct */
        s->cc_op = CC_OP_EFLAGS;
    }
}
}
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;
    dflag = s->code32;
    s->override = -1;
    rex_w = -1;
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2;
    } else
#endif
    {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;

    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        gen_op_lock();

    /* now check op code */
 reswitch:
    switch(b) {
    case 0x0f:
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        goto reswitch;

        /**************************/
        /* arith & logic */
    case 0x00 ... 0x05:
    case 0x08 ... 0x0d:
    case 0x10 ... 0x15:
    case 0x18 ... 0x1d:
    case 0x20 ... 0x25:
    case 0x28 ... 0x2d:
    case 0x30 ... 0x35:
    case 0x38 ... 0x3d:
        {
            int op, f, val;
            op = (b >> 3) & 7;
            f = (b >> 1) & 3;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            switch(f) {
            case 0: /* OP Ev, Gv */
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                mod = (modrm >> 6) & 3;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    opreg = OR_TMP0;
                } else if (op == OP_XORL && rm == reg) {
                xor_zero:
                    /* xor reg, reg optimisation */
                    gen_op_movl_T0_0();
                    s->cc_op = CC_OP_LOGICB + ot;
                    gen_op_mov_reg_T0(ot, reg);
                    gen_op_update1_cc();
                    break;
                } else {
                    opreg = rm;
                }
                gen_op_mov_TN_reg(ot, 1, reg);
                gen_op(s, op, ot, opreg);
                break;
            case 1: /* OP Gv, Ev */
                modrm = ldub_code(s->pc++);
                mod = (modrm >> 6) & 3;
                reg = ((modrm >> 3) & 7) | rex_r;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_ld_T1_A0(ot + s->mem_index);
                } else if (op == OP_XORL && rm == reg) {
                    goto xor_zero;
                } else {
                    gen_op_mov_TN_reg(ot, 1, rm);
                }
                gen_op(s, op, ot, reg);
                break;
            case 2: /* OP A, Iv */
                val = insn_get(s, ot);
                gen_op_movl_T1_im(val);
                gen_op(s, op, ot, OR_EAX);
                break;
            }
        }
        break;

    case 0x80: /* GRP1 */
    case 0x81:
    case 0x82:
    case 0x83:
        {
            int val;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (b == 0x83)
                    s->rip_offset = 1;
                else
                    s->rip_offset = insn_const_size(ot);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = rm;
            }

            switch(b) {
            default:
            case 0x80:
            case 0x81:
            case 0x82:
                val = insn_get(s, ot);
                break;
            case 0x83:
                val = (int8_t)insn_get(s, OT_BYTE);
                break;
            }
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, opreg);
        }
        break;
        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
        break;
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
        break;
    case 0xf6: /* GRP3 */
    case 0xf7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            gen_op_notl_T0();
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            break;
        case 3: /* neg */
            gen_op_negl_T0();
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mulb_AL_T0();
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mulw_AX_T0();
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
                gen_op_mull_EAX_T0();
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_op_mulq_EAX_T0();
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_imulb_AL_T0();
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_imulw_AX_T0();
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
                gen_op_imull_EAX_T0();
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_op_imulq_EAX_T0();
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divb_AL_T0();
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divw_AX_T0();
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
#ifdef MACRO_TEST
                /* XXX: this is just a test */
                tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
#else
                tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
#endif
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divq_EAX_T0();
                break;
#endif
            }
            break;
        case 7: /* idiv */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivb_AL_T0();
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivw_AX_T0();
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivq_EAX_T0();
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
3715 case 0xff: /* GRP5 */
3719 ot
= dflag
+ OT_WORD
;
3721 modrm
= ldub_code(s
->pc
++);
3722 mod
= (modrm
>> 6) & 3;
3723 rm
= (modrm
& 7) | REX_B(s
);
3724 op
= (modrm
>> 3) & 7;
3725 if (op
>= 2 && b
== 0xfe) {
3729 if (op
== 2 || op
== 4) {
3730 /* operand size for jumps is 64 bit */
3732 } else if (op
== 3 || op
== 5) {
3733 /* for call calls, the operand is 16 or 32 bit, even
3735 ot
= dflag
? OT_LONG
: OT_WORD
;
3736 } else if (op
== 6) {
3737 /* default push size is 64 bit */
3738 ot
= dflag
? OT_QUAD
: OT_WORD
;
3742 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3743 if (op
>= 2 && op
!= 3 && op
!= 5)
3744 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3746 gen_op_mov_TN_reg(ot
, 0, rm
);
3750 case 0: /* inc Ev */
3755 gen_inc(s
, ot
, opreg
, 1);
3757 case 1: /* dec Ev */
3762 gen_inc(s
, ot
, opreg
, -1);
3764 case 2: /* call Ev */
3765 /* XXX: optimize if memory (no 'and' is necessary) */
3767 gen_op_andl_T0_ffff();
3768 next_eip
= s
->pc
- s
->cs_base
;
3769 gen_movtl_T1_im(next_eip
);
3774 case 3: /* lcall Ev */
3775 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3776 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3777 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3779 if (s
->pe
&& !s
->vm86
) {
3780 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3781 gen_op_set_cc_op(s
->cc_op
);
3782 gen_jmp_im(pc_start
- s
->cs_base
);
3783 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- pc_start
);
3785 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3789 case 4: /* jmp Ev */
3791 gen_op_andl_T0_ffff();
3795 case 5: /* ljmp Ev */
3796 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3797 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3798 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3800 if (s
->pe
&& !s
->vm86
) {
3801 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3802 gen_op_set_cc_op(s
->cc_op
);
3803 gen_jmp_im(pc_start
- s
->cs_base
);
3804 gen_op_ljmp_protected_T0_T1(s
->pc
- pc_start
);
3806 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3807 gen_op_movl_T0_T1();
3812 case 6: /* push Ev */
    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movslq_RAX_EAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movswl_EAX_AX();
        else
            gen_op_movsbw_AX_AL();
        break;
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movsqo_RDX_RAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movslq_EDX_EAX();
        else
            gen_op_movswl_DX_AX();
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg(ot, 1, reg);
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        } else
#endif
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
        } else {
            gen_op_imulw_T0_T1();
        }
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg(ot, 1, reg);
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, rm);
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        }
        s->cc_op = CC_OP_SUBB + ot;
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
        gen_jmp_im(pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_cmpxchg8b();
        s->cc_op = CC_OP_EFLAGS;
        break;
        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /**************************/
        /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0(ot + s->mem_index);
        else
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;

    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(ot, 0, rm);
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    gen_op_movzbl_T0_T0();
                    break;
                case OT_BYTE | 8:
                    gen_op_movsbl_T0_T0();
                    break;
                case OT_WORD:
                    gen_op_movzwl_T0_T0();
                    break;
                default:
                case OT_WORD | 8:
                    gen_op_movswl_T0_T0();
                    break;
                }
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0(ot + s->mem_index);
                } else {
                    gen_op_ldu_T0_A0(ot + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        }
        break;
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;

    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_AL();
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                gen_op_lock();
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                gen_op_unlock();
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /************************/
        /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 3;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;

    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        gen_op_mov_TN_reg(ot, 1, reg);

        if (shift) {
            val = ldub_code(s->pc++);
            if (ot == OT_QUAD)
                val &= 0x3f;
            else
                val &= 0x1f;
            if (val) {
                if (mod == 3)
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
                else
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
                if (op == 0 && ot != OT_WORD)
                    s->cc_op = CC_OP_SHLB + ot;
                else
                    s->cc_op = CC_OP_SARB + ot;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (mod == 3)
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
            else
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
        }
        if (mod == 3)
            gen_op_mov_reg_T0(ot, rm);
        /************************/
        /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG);
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG);
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1);
                        break;
                    case 3:
                    default:
                        gen_op_ld_T0_A0(OT_WORD);
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
                        break;
                    }

                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        tcg_gen_helper_0_0(helper_fpop);
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG);
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG);
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1);
                        break;
                    case 3:
                    default:
                        gen_op_ld_T0_A0(OT_WORD);
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
                        break;
                    }
                    break;
                case 1:
                    /* XXX: the corresponding CPUID bit must be tested ! */
                    switch(op >> 4) {
                    case 1:
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                        gen_op_st_T0_A0(OT_LONG);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1);
                        tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                        gen_op_st_T0_A0(OT_WORD);
                        break;
                    }
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    switch(op >> 4) {
                    case 0:
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                        gen_op_st_T0_A0(OT_LONG);
                        break;
                    case 1:
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                        gen_op_st_T0_A0(OT_LONG);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1);
                        tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                        gen_op_st_T0_A0(OT_WORD);
                        break;
                    }
                    if ((op & 7) == 3)
                        tcg_gen_helper_0_0(helper_fpop);
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fldenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0d: /* fldcw mem */
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2);
                break;
            case 0x0e: /* fnstenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fstenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0f: /* fnstcw mem */
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x1d: /* fldt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
                break;
            case 0x1f: /* fstpt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* frstor mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_frstor,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2e: /* fnsave mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fsave,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2f: /* fnstsw mem */
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x3c: /* fbld */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
                break;
            case 0x3e: /* fbstp */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3d: /* fildll */
                tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1);
                break;
            case 0x3f: /* fistpll */
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1);
                tcg_gen_qemu_st64(cpu_tmp1, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops */
            opreg = rm;

            switch(op) {
            case 0x08: /* fld sti */
                tcg_gen_helper_0_0(helper_fpush);
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    tcg_gen_helper_0_0(helper_fwait);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    tcg_gen_helper_0_0(helper_fchs_ST0);
                    break;
                case 1: /* fabs */
                    tcg_gen_helper_0_0(helper_fabs_ST0);
                    break;
                case 4: /* ftst */
                    tcg_gen_helper_0_0(helper_fldz_FT0);
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    break;
                case 5: /* fxam */
                    tcg_gen_helper_0_0(helper_fxam_ST0);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                {
                    switch(rm) {
                    case 0:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fld1_ST0);
                        break;
                    case 1:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
                        break;
                    case 2:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
                        break;
                    case 3:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
                        break;
                    case 4:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
                        break;
                    case 5:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
                        break;
                    case 6:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldz_ST0);
                        break;
                    default:
                        goto illegal_op;
                    }
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    tcg_gen_helper_0_0(helper_f2xm1);
                    break;
                case 1: /* fyl2x */
                    tcg_gen_helper_0_0(helper_fyl2x);
                    break;
                case 2: /* fptan */
                    tcg_gen_helper_0_0(helper_fptan);
                    break;
                case 3: /* fpatan */
                    tcg_gen_helper_0_0(helper_fpatan);
                    break;
                case 4: /* fxtract */
                    tcg_gen_helper_0_0(helper_fxtract);
                    break;
                case 5: /* fprem1 */
                    tcg_gen_helper_0_0(helper_fprem1);
                    break;
                case 6: /* fdecstp */
                    tcg_gen_helper_0_0(helper_fdecstp);
                    break;
                default:
                case 7: /* fincstp */
                    tcg_gen_helper_0_0(helper_fincstp);
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    tcg_gen_helper_0_0(helper_fprem);
                    break;
                case 1: /* fyl2xp1 */
                    tcg_gen_helper_0_0(helper_fyl2xp1);
                    break;
                case 2: /* fsqrt */
                    tcg_gen_helper_0_0(helper_fsqrt);
                    break;
                case 3: /* fsincos */
                    tcg_gen_helper_0_0(helper_fsincos);
                    break;
                case 5: /* fscale */
                    tcg_gen_helper_0_0(helper_fscale);
                    break;
                case 4: /* frndint */
                    tcg_gen_helper_0_0(helper_frndint);
                    break;
                case 6: /* fsin */
                    tcg_gen_helper_0_0(helper_fsin);
                    break;
                default:
                case 7: /* fcos */
                    tcg_gen_helper_0_0(helper_fcos);
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
                        if (op >= 0x30)
                            tcg_gen_helper_0_0(helper_fpop);
                    } else {
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    tcg_gen_helper_0_0(helper_fclex);
                    break;
                case 3: /* fninit */
                    tcg_gen_helper_0_0(helper_fninit);
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                break;
            case 0x2a: /* fst sti */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* fucom st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                break;
            case 0x2d: /* fucomp st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1, l1;
                    const static uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
                    gen_setcc(s, op1);
                    l1 = gen_new_label();
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
                    gen_set_label(l1);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
        /************************/
        /* string ops */
    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
            break;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        if (gen_svm_check_io(s, pc_start,
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
                             4 | (1 << (7+s->aflag))))
            break;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;
        /************************/
        /* port I/O */
    case 0xe4:
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
                             (1 << (4+ot))))
            break;
        gen_op_in[ot]();
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xe6:
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
                             (1 << (4+ot))))
            break;
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        gen_op_out[ot]();
        break;
    case 0xec:
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
                             (1 << (4+ot))))
            break;
        gen_op_in[ot]();
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xee:
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             svm_is_rep(prefixes) | (1 << (4+ot))))
            break;
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        gen_op_out[ot]();
        break;
        /************************/
        /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_lret_protected(s->dflag, val);
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
            break;
        if (!s->pe) {
            /* real mode */
            gen_op_iret_real(s->dflag);
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_op_iret_real(s->dflag);
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;

    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_setcc(s, b);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(ot + s->mem_index);
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 1, rm);
        }
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
        break;
        /************************/
        /* flags */
    case 0x9c: /* pushf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_movl_T0_eflags();
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            if (s->cpl == 0) {
                if (s->dflag) {
                    gen_op_movl_eflags_T0_cpl0();
                } else {
                    gen_op_movw_eflags_T0_cpl0();
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0_io();
                    } else {
                        gen_op_movw_eflags_T0_io();
                    }
                } else {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0();
                    } else {
                        gen_op_movw_eflags_T0();
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movb_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movl_T0_eflags();
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_cmc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_clc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_stc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        gen_op_cld();
        break;
    case 0xfd: /* std */
        gen_op_std();
        break;
        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            gen_op_update_bt_cc();
        }
        break;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            gen_op_update_bt_cc();
        }
        break;
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        /* NOTE: in order to handle the 0 case, we must load the
           result. It could be optimized with a generated jump */
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
        gen_op_mov_reg_T1(ot, reg);
        s->cc_op = CC_OP_LOGICB + ot;
        break;
        /************************/
        /* bcd */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_daa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_das();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_aaa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_aas();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            gen_op_aam(val);
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        gen_op_aad(val);
        s->cc_op = CC_OP_LOGICB;
        break;
        /************************/
        /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
        }
        break;
    case 0xcc: /* int3 */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_into(s->pc - pc_start);
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
            break;
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                gen_op_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                gen_op_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                gen_op_sti();
                /* interrupts are enabled only for the first insn after sti */
                /* if several instructions disable interrupts, only the
                   first one takes effect */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    gen_op_set_inhibit_irq();
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3)
                goto gen_sti;
            else
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        }
        break;
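        /* The HF_INHIBIT_IRQ_MASK logic implements the one-instruction
           interrupt shadow after sti: the inhibit flag covers only the
           next insn, and the block is ended with gen_eob() right after so
           the execution loop can service any pending interrupt at the
           architecturally correct boundary. */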
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        if (ot == OT_WORD)
            gen_op_boundw();
        else
            gen_op_boundl();
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else {
            TCGv tmp0;
            gen_op_mov_TN_reg(OT_LONG, 0, reg);

            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        {
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#endif
        break;
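        /* On targets where globals live in 64-bit registers, the 32-bit
           swap goes through an i32 temporary: truncate, byte-swap, then
           zero-extend back. The zero-extension also clears the high 32
           bits of the destination, matching what a 32-bit operation does
           on real x86-64 hardware. */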
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_salc();
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        /* FALL THRU */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            b &= 3;
            if (b == 3) {
                gen_op_jz_ecx[s->aflag](l1);
            } else {
                gen_op_dec_ECX[s->aflag]();
                gen_op_loop[s->aflag][b](l1);
            }

            gen_jmp_im(next_eip);
            gen_op_jmp_label(l2);
            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            int retval = 0;
            if (b & 2) {
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
                gen_op_rdmsr();
            } else {
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
                gen_op_wrmsr();
            }
            if (retval)
                gen_eob(s);
        }
        break;
    case 0x131: /* rdtsc */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
            break;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_rdtsc();
        break;
    case 0x133: /* rdpmc */
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_rdpmc();
        break;
    case 0x134: /* sysenter */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_sysenter();
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_sysexit();
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_syscall(s->pc - pc_start);
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_sysret(s->dflag);
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
            break;
        gen_op_cpuid();
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
                break;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            gen_op_hlt();
            s->is_jmp = 3;
        }
        break;
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_lldt_T0();
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_ltr_T0();
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                gen_op_verr();
            else
                gen_op_verw();
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
                break;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
        case 1: /* sidt; with mod == 3: monitor/mwait */
            if (mod == 3) {
                switch(rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
                        break;
                    gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EBX);
                        gen_op_addq_A0_AL();
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EBX);
                        gen_op_addl_A0_AL();
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    gen_op_monitor();
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
                        break;
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_op_mwait();
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
                    break;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                switch(rm) {
                case 0: /* VMRUN */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
                        break;
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_op_vmrun();
                    s->cc_op = CC_OP_EFLAGS;
                    gen_eob(s);
                    break;
                case 1: /* VMMCALL */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
                        break;
                    /* FIXME: cause #UD if hflags & SVM */
                    gen_op_vmmcall();
                    break;
                case 2: /* VMLOAD */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
                        break;
                    gen_op_vmload();
                    break;
                case 3: /* VMSAVE */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
                        break;
                    gen_op_vmsave();
                    break;
                case 4: /* STGI */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
                        break;
                    gen_op_stgi();
                    break;
                case 5: /* CLGI */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
                        break;
                    gen_op_clgi();
                    break;
                case 6: /* SKINIT */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
                        break;
                    gen_op_skinit();
                    break;
                case 7: /* INVLPGA */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
                        break;
                    gen_op_invlpga();
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start,
                                            op == 2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
                    break;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
                } else {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_op_lmsw_T0();
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                        break;
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_invlpg_A0();
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
                break;
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of the destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    gen_op_movslq_T0_T0();
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_TN_reg(ot, 0, rm);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS;
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_arpl_update();
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (b & 2) {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                    gen_op_mov_TN_reg(ot, 0, rm);
                    gen_op_movl_crN_T0(reg);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                    if (reg == 8)
                        gen_op_movtl_T0_cr8();
                    else
#endif
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                gen_op_movl_drN_T0(reg);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_op_clts();
            /* abort the block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
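        /* The tcg_const_i32((s->dflag == 2)) argument tells the fxsave/
           fxrstor helpers whether the insn ran with REX.W, i.e. whether
           the 64-bit FXSAVE area layout is in effect; presumably the
           helper selects the record format based on that flag. */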
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        gen_op_rsm();
        gen_eob(s);
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
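
/* These masks aggregate the arithmetic EFLAGS bits. The opc_read_flags /
   opc_write_flags tables below use them to describe, per micro-op, which
   of O/S/Z/A/P/C the op consumes or produces; optimize_flags() later runs
   a liveness pass over exactly these masks. */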
/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
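
/* Reading DEF_READF: adc/sbb and rcl/rcr are precisely the ops that
   consume the incoming carry, so every size and access-suffix variant is
   marked as reading CC_C. This keeps a preceding flag computation live
   across such an op even when nothing else inspects the flags. */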

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};

/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_com_dummy] = CC_OSZAPC,
    [INDEX_op_com_dummy] = CC_OSZAPC,
    [INDEX_op_com_dummy] = CC_OSZAPC,
    [INDEX_op_com_dummy] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
    [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,

#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};

/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};

static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
void optimize_flags_init(void)
{
    int i;

    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
    cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif

    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);
}

/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are needed. The operation is
   modified if suitable. */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}
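
/* Example of the backward liveness walk (illustrative, not from the
   original source): for a block that ends in
       shll_T0_T1_cc; movl_T0_eflags; end
   the final movl_T0_eflags reads CC_OSZAPC, so the shift's flag writes
   are live and the op is kept in its _cc form. In
       shll_T0_T1_cc; shll_T0_T1_cc; movl_T0_eflags
   the first shift's flags are fully overwritten by the second before
   anything reads them, so opc_simpler[] downgrades it to the flagless
   shll_T0_T1. */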

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );

    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if the translation grows too long, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
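
/* gen_intermediate_code_internal serves two callers: the normal translate
   path (search_pc == 0) and, after a fault, a re-run with search_pc == 1
   that rebuilds the gen_opc_pc / gen_opc_cc_op tables which gen_pc_load
   below uses to recover the guest eip and cc_op at the faulting insn. */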

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}