4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr
;
34 static uint32_t *gen_opparam_ptr
;
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
61 static int x86_64_hregs
;
64 #ifdef USE_DIRECT_JUMP
67 #define TBPARAM(x) (long)(x)
70 typedef struct DisasContext
{
71 /* current insn context */
72 int override
; /* -1 if no override */
75 target_ulong pc
; /* pc = eip + cs_base */
76 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base
; /* base of CS segment */
80 int pe
; /* protected mode */
81 int code32
; /* 32 bit code segment */
83 int lma
; /* long mode active */
84 int code64
; /* 64 bit code segment */
87 int ss32
; /* 32 bit stack segment */
88 int cc_op
; /* current CC operation */
89 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st
; /* currently unused */
91 int vm86
; /* vm86 mode */
94 int tf
; /* TF cpu flag */
95 int singlestep_enabled
; /* "hardware" single step enabled */
96 int jmp_opt
; /* use direct block chaining for direct jumps */
97 int mem_index
; /* select memory access functions */
98 int flags
; /* all execution flags */
99 struct TranslationBlock
*tb
;
100 int popl_esp_hack
; /* for correct popl with esp base handling */
101 int rip_offset
; /* only used in x86_64, but left for simplicity */
105 static void gen_eob(DisasContext
*s
);
106 static void gen_jmp(DisasContext
*s
, target_ulong eip
);
107 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
);
109 /* i386 arith/logic operations */
129 OP_SHL1
, /* undocumented */
134 #define DEF(s, n, copy_size) INDEX_op_ ## s,
151 /* I386 int registers */
152 OR_EAX
, /* MUST be even numbered */
161 OR_TMP0
= 16, /* temporary operand register */
163 OR_A0
, /* temporary register used when doing address evaluation */
168 #define NB_OP_SIZES 4
170 #define DEF_REGS(prefix, suffix) \
171 prefix ## EAX ## suffix,\
172 prefix ## ECX ## suffix,\
173 prefix ## EDX ## suffix,\
174 prefix ## EBX ## suffix,\
175 prefix ## ESP ## suffix,\
176 prefix ## EBP ## suffix,\
177 prefix ## ESI ## suffix,\
178 prefix ## EDI ## suffix,\
179 prefix ## R8 ## suffix,\
180 prefix ## R9 ## suffix,\
181 prefix ## R10 ## suffix,\
182 prefix ## R11 ## suffix,\
183 prefix ## R12 ## suffix,\
184 prefix ## R13 ## suffix,\
185 prefix ## R14 ## suffix,\
186 prefix ## R15 ## suffix,
188 #define DEF_BREGS(prefixb, prefixh, suffix) \
190 static void prefixb ## ESP ## suffix ## _wrapper(void) \
193 prefixb ## ESP ## suffix (); \
195 prefixh ## EAX ## suffix (); \
198 static void prefixb ## EBP ## suffix ## _wrapper(void) \
201 prefixb ## EBP ## suffix (); \
203 prefixh ## ECX ## suffix (); \
206 static void prefixb ## ESI ## suffix ## _wrapper(void) \
209 prefixb ## ESI ## suffix (); \
211 prefixh ## EDX ## suffix (); \
214 static void prefixb ## EDI ## suffix ## _wrapper(void) \
217 prefixb ## EDI ## suffix (); \
219 prefixh ## EBX ## suffix (); \
222 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T0
)
223 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T1
)
224 DEF_BREGS(gen_op_movl_T0_
, gen_op_movh_T0_
, )
225 DEF_BREGS(gen_op_movl_T1_
, gen_op_movh_T1_
, )
227 #else /* !TARGET_X86_64 */
229 #define NB_OP_SIZES 3
231 #define DEF_REGS(prefix, suffix) \
232 prefix ## EAX ## suffix,\
233 prefix ## ECX ## suffix,\
234 prefix ## EDX ## suffix,\
235 prefix ## EBX ## suffix,\
236 prefix ## ESP ## suffix,\
237 prefix ## EBP ## suffix,\
238 prefix ## ESI ## suffix,\
239 prefix ## EDI ## suffix,
241 #endif /* !TARGET_X86_64 */
243 static GenOpFunc
*gen_op_mov_reg_T0
[NB_OP_SIZES
][CPU_NB_REGS
] = {
250 gen_op_movb_ESP_T0_wrapper
,
251 gen_op_movb_EBP_T0_wrapper
,
252 gen_op_movb_ESI_T0_wrapper
,
253 gen_op_movb_EDI_T0_wrapper
,
270 DEF_REGS(gen_op_movw_
, _T0
)
273 DEF_REGS(gen_op_movl_
, _T0
)
277 DEF_REGS(gen_op_movq_
, _T0
)
282 static GenOpFunc
*gen_op_mov_reg_T1
[NB_OP_SIZES
][CPU_NB_REGS
] = {
289 gen_op_movb_ESP_T1_wrapper
,
290 gen_op_movb_EBP_T1_wrapper
,
291 gen_op_movb_ESI_T1_wrapper
,
292 gen_op_movb_EDI_T1_wrapper
,
309 DEF_REGS(gen_op_movw_
, _T1
)
312 DEF_REGS(gen_op_movl_
, _T1
)
316 DEF_REGS(gen_op_movq_
, _T1
)
321 static GenOpFunc
*gen_op_mov_reg_A0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
323 DEF_REGS(gen_op_movw_
, _A0
)
326 DEF_REGS(gen_op_movl_
, _A0
)
330 DEF_REGS(gen_op_movq_
, _A0
)
335 static GenOpFunc
*gen_op_mov_TN_reg
[NB_OP_SIZES
][2][CPU_NB_REGS
] =
344 gen_op_movl_T0_ESP_wrapper
,
345 gen_op_movl_T0_EBP_wrapper
,
346 gen_op_movl_T0_ESI_wrapper
,
347 gen_op_movl_T0_EDI_wrapper
,
369 gen_op_movl_T1_ESP_wrapper
,
370 gen_op_movl_T1_EBP_wrapper
,
371 gen_op_movl_T1_ESI_wrapper
,
372 gen_op_movl_T1_EDI_wrapper
,
391 DEF_REGS(gen_op_movl_T0_
, )
394 DEF_REGS(gen_op_movl_T1_
, )
399 DEF_REGS(gen_op_movl_T0_
, )
402 DEF_REGS(gen_op_movl_T1_
, )
408 DEF_REGS(gen_op_movl_T0_
, )
411 DEF_REGS(gen_op_movl_T1_
, )
417 static GenOpFunc
*gen_op_movl_A0_reg
[CPU_NB_REGS
] = {
418 DEF_REGS(gen_op_movl_A0_
, )
421 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][CPU_NB_REGS
] = {
423 DEF_REGS(gen_op_addl_A0_
, )
426 DEF_REGS(gen_op_addl_A0_
, _s1
)
429 DEF_REGS(gen_op_addl_A0_
, _s2
)
432 DEF_REGS(gen_op_addl_A0_
, _s3
)
437 static GenOpFunc
*gen_op_movq_A0_reg
[CPU_NB_REGS
] = {
438 DEF_REGS(gen_op_movq_A0_
, )
441 static GenOpFunc
*gen_op_addq_A0_reg_sN
[4][CPU_NB_REGS
] = {
443 DEF_REGS(gen_op_addq_A0_
, )
446 DEF_REGS(gen_op_addq_A0_
, _s1
)
449 DEF_REGS(gen_op_addq_A0_
, _s2
)
452 DEF_REGS(gen_op_addq_A0_
, _s3
)
457 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
459 DEF_REGS(gen_op_cmovw_
, _T1_T0
)
462 DEF_REGS(gen_op_cmovl_
, _T1_T0
)
466 DEF_REGS(gen_op_cmovq_
, _T1_T0
)
471 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
482 #define DEF_ARITHC(SUFFIX)\
484 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
488 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
492 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
496 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
500 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[4][2] = {
504 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3 * 4][2] = {
506 #ifndef CONFIG_USER_ONLY
512 static const int cc_op_arithb
[8] = {
523 #define DEF_CMPXCHG(SUFFIX)\
524 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[4] = {
533 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3 * 4] = {
535 #ifndef CONFIG_USER_ONLY
541 #define DEF_SHIFT(SUFFIX)\
543 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
553 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
563 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
573 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
583 static GenOpFunc
*gen_op_shift_T0_T1_cc
[4][8] = {
587 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3 * 4][8] = {
589 #ifndef CONFIG_USER_ONLY
595 #define DEF_SHIFTD(SUFFIX, op)\
601 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[4][2] = {
615 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[4][2] = {
619 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[3 * 4][2] = {
621 #ifndef CONFIG_USER_ONLY
622 DEF_SHIFTD(_kernel
, im
)
623 DEF_SHIFTD(_user
, im
)
627 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[3 * 4][2] = {
628 DEF_SHIFTD(_raw
, ECX
)
629 #ifndef CONFIG_USER_ONLY
630 DEF_SHIFTD(_kernel
, ECX
)
631 DEF_SHIFTD(_user
, ECX
)
635 static GenOpFunc
*gen_op_btx_T0_T1_cc
[3][4] = {
638 gen_op_btsw_T0_T1_cc
,
639 gen_op_btrw_T0_T1_cc
,
640 gen_op_btcw_T0_T1_cc
,
644 gen_op_btsl_T0_T1_cc
,
645 gen_op_btrl_T0_T1_cc
,
646 gen_op_btcl_T0_T1_cc
,
651 gen_op_btsq_T0_T1_cc
,
652 gen_op_btrq_T0_T1_cc
,
653 gen_op_btcq_T0_T1_cc
,
658 static GenOpFunc
*gen_op_add_bit_A0_T1
[3] = {
659 gen_op_add_bitw_A0_T1
,
660 gen_op_add_bitl_A0_T1
,
661 X86_64_ONLY(gen_op_add_bitq_A0_T1
),
664 static GenOpFunc
*gen_op_bsx_T0_cc
[3][2] = {
681 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 4] = {
682 gen_op_ldsb_raw_T0_A0
,
683 gen_op_ldsw_raw_T0_A0
,
684 X86_64_ONLY(gen_op_ldsl_raw_T0_A0
),
686 #ifndef CONFIG_USER_ONLY
687 gen_op_ldsb_kernel_T0_A0
,
688 gen_op_ldsw_kernel_T0_A0
,
689 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0
),
692 gen_op_ldsb_user_T0_A0
,
693 gen_op_ldsw_user_T0_A0
,
694 X86_64_ONLY(gen_op_ldsl_user_T0_A0
),
699 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 4] = {
700 gen_op_ldub_raw_T0_A0
,
701 gen_op_lduw_raw_T0_A0
,
705 #ifndef CONFIG_USER_ONLY
706 gen_op_ldub_kernel_T0_A0
,
707 gen_op_lduw_kernel_T0_A0
,
711 gen_op_ldub_user_T0_A0
,
712 gen_op_lduw_user_T0_A0
,
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 4] = {
720 gen_op_ldub_raw_T0_A0
,
721 gen_op_lduw_raw_T0_A0
,
722 gen_op_ldl_raw_T0_A0
,
723 X86_64_ONLY(gen_op_ldq_raw_T0_A0
),
725 #ifndef CONFIG_USER_ONLY
726 gen_op_ldub_kernel_T0_A0
,
727 gen_op_lduw_kernel_T0_A0
,
728 gen_op_ldl_kernel_T0_A0
,
729 X86_64_ONLY(gen_op_ldq_kernel_T0_A0
),
731 gen_op_ldub_user_T0_A0
,
732 gen_op_lduw_user_T0_A0
,
733 gen_op_ldl_user_T0_A0
,
734 X86_64_ONLY(gen_op_ldq_user_T0_A0
),
738 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 4] = {
739 gen_op_ldub_raw_T1_A0
,
740 gen_op_lduw_raw_T1_A0
,
741 gen_op_ldl_raw_T1_A0
,
742 X86_64_ONLY(gen_op_ldq_raw_T1_A0
),
744 #ifndef CONFIG_USER_ONLY
745 gen_op_ldub_kernel_T1_A0
,
746 gen_op_lduw_kernel_T1_A0
,
747 gen_op_ldl_kernel_T1_A0
,
748 X86_64_ONLY(gen_op_ldq_kernel_T1_A0
),
750 gen_op_ldub_user_T1_A0
,
751 gen_op_lduw_user_T1_A0
,
752 gen_op_ldl_user_T1_A0
,
753 X86_64_ONLY(gen_op_ldq_user_T1_A0
),
757 static GenOpFunc
*gen_op_st_T0_A0
[3 * 4] = {
758 gen_op_stb_raw_T0_A0
,
759 gen_op_stw_raw_T0_A0
,
760 gen_op_stl_raw_T0_A0
,
761 X86_64_ONLY(gen_op_stq_raw_T0_A0
),
763 #ifndef CONFIG_USER_ONLY
764 gen_op_stb_kernel_T0_A0
,
765 gen_op_stw_kernel_T0_A0
,
766 gen_op_stl_kernel_T0_A0
,
767 X86_64_ONLY(gen_op_stq_kernel_T0_A0
),
769 gen_op_stb_user_T0_A0
,
770 gen_op_stw_user_T0_A0
,
771 gen_op_stl_user_T0_A0
,
772 X86_64_ONLY(gen_op_stq_user_T0_A0
),
776 static GenOpFunc
*gen_op_st_T1_A0
[3 * 4] = {
778 gen_op_stw_raw_T1_A0
,
779 gen_op_stl_raw_T1_A0
,
780 X86_64_ONLY(gen_op_stq_raw_T1_A0
),
782 #ifndef CONFIG_USER_ONLY
784 gen_op_stw_kernel_T1_A0
,
785 gen_op_stl_kernel_T1_A0
,
786 X86_64_ONLY(gen_op_stq_kernel_T1_A0
),
789 gen_op_stw_user_T1_A0
,
790 gen_op_stl_user_T1_A0
,
791 X86_64_ONLY(gen_op_stq_user_T1_A0
),
/* Load the immediate jump target 'pc' into the emulated EIP,
   picking the cheapest micro-op: a 32-bit move when the value fits
   zero- or sign-extended, otherwise a split 64-bit immediate move.
   NOTE(review): this extract is line-elided — the braces and the
   #ifdef TARGET_X86_64 scaffolding between the numbered lines are
   missing from view. */
795 static inline void gen_jmp_im(target_ulong pc
)
798 if (pc
== (uint32_t)pc
) {
/* fits zero-extended in 32 bits: plain 32-bit EIP move */
799 gen_op_movl_eip_im(pc
);
800 } else if (pc
== (int32_t)pc
) {
/* fits sign-extended in 32 bits */
801 gen_op_movq_eip_im(pc
);
/* full 64-bit immediate, passed as two 32-bit halves */
803 gen_op_movq_eip_im64(pc
>> 32, pc
);
/* non-64-bit build path: always a 32-bit move */
806 gen_op_movl_eip_im(pc
);
/* Compute the source address of a string instruction into A0 from
   ESI plus the active segment base.  A segment override (s->override)
   replaces the default DS segment.  Three address-size paths exist:
   64-bit (movq), 32-bit (movl) and 16-bit (masked to 0xffff).
   NOTE(review): line-elided extract — enclosing if/else and #ifdef
   branch structure is missing between the numbered lines. */
810 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
814 override
= s
->override
;
/* 64-bit path: add overridden segment base, then ESI */
818 gen_op_movq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
819 gen_op_addq_A0_reg_sN
[0][R_ESI
]();
821 gen_op_movq_A0_reg
[R_ESI
]();
/* 32-bit path: segment base only needed when addseg is set and
   there is no explicit override (override < 0 means none) */
827 if (s
->addseg
&& override
< 0)
830 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
831 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
833 gen_op_movl_A0_reg
[R_ESI
]();
836 /* 16 address, always override */
839 gen_op_movl_A0_reg
[R_ESI
]();
/* 16-bit addressing wraps at 64K */
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
/* Compute the destination address of a string instruction into A0
   from EDI.  Per the x86 architecture the destination segment is
   always ES and cannot be overridden.  64/32/16-bit address-size
   paths mirror gen_string_movl_A0_ESI.
   NOTE(review): line-elided extract — the if/else scaffolding
   between the numbered lines is missing from view. */
845 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
/* 64-bit path: flat addressing, EDI only */
849 gen_op_movq_A0_reg
[R_EDI
]();
/* 32-bit path with segment addition: ES base + EDI */
854 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
855 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
857 gen_op_movl_A0_reg
[R_EDI
]();
/* 16-bit path: EDI masked to 64K, plus ES base */
860 gen_op_movl_A0_reg
[R_EDI
]();
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
866 static GenOpFunc
*gen_op_movl_T0_Dshift
[4] = {
867 gen_op_movl_T0_Dshiftb
,
868 gen_op_movl_T0_Dshiftw
,
869 gen_op_movl_T0_Dshiftl
,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq
),
873 static GenOpFunc1
*gen_op_jnz_ecx
[3] = {
876 X86_64_ONLY(gen_op_jnz_ecxq
),
879 static GenOpFunc1
*gen_op_jz_ecx
[3] = {
882 X86_64_ONLY(gen_op_jz_ecxq
),
885 static GenOpFunc
*gen_op_dec_ECX
[3] = {
888 X86_64_ONLY(gen_op_decq_ECX
),
891 static GenOpFunc1
*gen_op_string_jnz_sub
[2][4] = {
896 X86_64_ONLY(gen_op_jnz_subq
),
902 X86_64_ONLY(gen_op_jz_subq
),
906 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
912 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
918 static GenOpFunc
*gen_op_in
[3] = {
924 static GenOpFunc
*gen_op_out
[3] = {
930 static GenOpFunc
*gen_check_io_T0
[3] = {
936 static GenOpFunc
*gen_check_io_DX
[3] = {
/* Emit an I/O-permission check before an IN/OUT style instruction.
   Only needed in protected mode when CPL > IOPL, or in vm86 mode;
   otherwise the access is unconditionally allowed.  'use_dx' appears
   to select the port-in-DX variant over the port-in-T0 variant —
   the selecting if/else lines are elided from this extract.
   'cur_eip' is presumably used by the elided lines to report a
   precise fault address — TODO confirm against the full source. */
942 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, target_ulong cur_eip
)
944 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
/* flush lazily-evaluated condition codes before the helper call */
945 if (s
->cc_op
!= CC_OP_DYNAMIC
)
946 gen_op_set_cc_op(s
->cc_op
);
/* port number taken from DX */
949 gen_check_io_DX
[ot
]();
/* port number taken from T0 */
951 gen_check_io_T0
[ot
]();
/* Emit one MOVS iteration: load from [seg:ESI] into T0, store to
   [ES:EDI], then advance ESI and EDI by the direction-flag-scaled
   element size (Dshift).  'ot' indexes the operand size and
   s->mem_index selects the raw/kernel/user access-function column.
   NOTE(review): the #ifdef TARGET_X86_64 / aflag branch scaffolding
   around the three add paths (q/l/w) is elided in this extract. */
955 static inline void gen_movs(DisasContext
*s
, int ot
)
957 gen_string_movl_A0_ESI(s
);
958 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
959 gen_string_movl_A0_EDI(s
);
960 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 := +size or -size depending on EFLAGS.DF */
961 gen_op_movl_T0_Dshift
[ot
]();
/* 64-bit address-size increment path */
964 gen_op_addq_ESI_T0();
965 gen_op_addq_EDI_T0();
/* 32-bit address-size increment path */
969 gen_op_addl_ESI_T0();
970 gen_op_addl_EDI_T0();
/* 16-bit address-size increment path */
972 gen_op_addw_ESI_T0();
973 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked condition-code state: if a static cc_op
   is pending, emit the op that records it in the CPU state and mark
   the translator state as dynamic so it is not flushed twice. */
977 static inline void gen_update_cc_op(DisasContext
*s
)
979 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
980 gen_op_set_cc_op(s
->cc_op
);
981 s
->cc_op
= CC_OP_DYNAMIC
;
985 /* XXX: does not work with gdbstub "ice" single step - not a
987 static int gen_jz_ecx_string(DisasContext
*s
, target_ulong next_eip
)
991 l1
= gen_new_label();
992 l2
= gen_new_label();
993 gen_op_jnz_ecx
[s
->aflag
](l1
);
995 gen_jmp_tb(s
, next_eip
, 1);
/* Emit one STOS iteration: T0 := EAX, store T0 to [ES:EDI], then
   advance EDI by the direction-scaled element size.
   NOTE(review): the #else/#endif lines pairing the aflag branches
   are elided in this extract. */
1000 static inline void gen_stos(DisasContext
*s
, int ot
)
/* fetch EAX (full 32-bit read; store below truncates to 'ot') */
1002 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1003 gen_string_movl_A0_EDI(s
);
1004 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 := signed step from EFLAGS.DF */
1005 gen_op_movl_T0_Dshift
[ot
]();
1006 #ifdef TARGET_X86_64
1007 if (s
->aflag
== 2) {
1008 gen_op_addq_EDI_T0();
1012 gen_op_addl_EDI_T0();
1014 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load [seg:ESI] into T0, write it to EAX
   at operand size 'ot', then advance ESI by the direction-scaled
   element size.  NOTE(review): #else/#endif scaffolding for the
   aflag branches is elided in this extract. */
1018 static inline void gen_lods(DisasContext
*s
, int ot
)
1020 gen_string_movl_A0_ESI(s
);
1021 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1022 gen_op_mov_reg_T0
[ot
][R_EAX
]();
1023 gen_op_movl_T0_Dshift
[ot
]();
1024 #ifdef TARGET_X86_64
1025 if (s
->aflag
== 2) {
1026 gen_op_addq_ESI_T0();
1030 gen_op_addl_ESI_T0();
1032 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: T0 := EAX, T1 := [ES:EDI], compare them
   (setting the lazy flags via gen_op_cmpl_T0_T1_cc), then advance
   EDI.  NOTE(review): #else/#endif scaffolding for the aflag
   branches is elided in this extract. */
1036 static inline void gen_scas(DisasContext
*s
, int ot
)
1038 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1039 gen_string_movl_A0_EDI(s
);
1040 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
/* compare accumulator against memory operand */
1041 gen_op_cmpl_T0_T1_cc();
1042 gen_op_movl_T0_Dshift
[ot
]();
1043 #ifdef TARGET_X86_64
1044 if (s
->aflag
== 2) {
1045 gen_op_addq_EDI_T0();
1049 gen_op_addl_EDI_T0();
1051 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: T0 := [seg:ESI], T1 := [ES:EDI], compare
   them, then advance both ESI and EDI by the direction-scaled
   element size.  NOTE(review): #else/#endif scaffolding for the
   aflag branches is elided in this extract. */
1055 static inline void gen_cmps(DisasContext
*s
, int ot
)
1057 gen_string_movl_A0_ESI(s
);
1058 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1059 gen_string_movl_A0_EDI(s
);
1060 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
/* sets the lazy flag state for the comparison */
1061 gen_op_cmpl_T0_T1_cc();
1062 gen_op_movl_T0_Dshift
[ot
]();
1063 #ifdef TARGET_X86_64
1064 if (s
->aflag
== 2) {
1065 gen_op_addq_ESI_T0();
1066 gen_op_addq_EDI_T0();
1070 gen_op_addl_ESI_T0();
1071 gen_op_addl_EDI_T0();
1073 gen_op_addw_ESI_T0();
1074 gen_op_addw_EDI_T0();
/* Emit one INS iteration: read a value from port DX into T0, store
   it to [ES:EDI], then advance EDI.  The first store at line 1082
   appears to pre-touch the destination before the port read (its
   surrounding comment is elided) — TODO confirm intent against the
   full source.  #else/#endif aflag scaffolding is also elided. */
1078 static inline void gen_ins(DisasContext
*s
, int ot
)
1080 gen_string_movl_A0_EDI(s
);
1082 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 := input from port DX */
1083 gen_op_in_DX_T0
[ot
]();
1084 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1085 gen_op_movl_T0_Dshift
[ot
]();
1086 #ifdef TARGET_X86_64
1087 if (s
->aflag
== 2) {
1088 gen_op_addq_EDI_T0();
1092 gen_op_addl_EDI_T0();
1094 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: load T0 from [seg:ESI], write it to port
   DX, then advance ESI by the direction-scaled element size.
   NOTE(review): #else/#endif scaffolding for the aflag branches is
   elided in this extract. */
1098 static inline void gen_outs(DisasContext
*s
, int ot
)
1100 gen_string_movl_A0_ESI(s
);
1101 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* output T0 to port DX */
1102 gen_op_out_DX_T0
[ot
]();
1103 gen_op_movl_T0_Dshift
[ot
]();
1104 #ifdef TARGET_X86_64
1105 if (s
->aflag
== 2) {
1106 gen_op_addq_ESI_T0();
1110 gen_op_addl_ESI_T0();
1112 gen_op_addw_ESI_T0();
1116 /* same method as Valgrind : we generate jumps to current or next
1118 #define GEN_REPZ(op) \
1119 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1120 target_ulong cur_eip, target_ulong next_eip) \
1123 gen_update_cc_op(s); \
1124 l2 = gen_jz_ecx_string(s, next_eip); \
1125 gen_ ## op(s, ot); \
1126 gen_op_dec_ECX[s->aflag](); \
1127 /* a loop would cause two single step exceptions if ECX = 1 \
1128 before rep string_insn */ \
1130 gen_op_jz_ecx[s->aflag](l2); \
1131 gen_jmp(s, cur_eip); \
1134 #define GEN_REPZ2(op) \
1135 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1136 target_ulong cur_eip, \
1137 target_ulong next_eip, \
1141 gen_update_cc_op(s); \
1142 l2 = gen_jz_ecx_string(s, next_eip); \
1143 gen_ ## op(s, ot); \
1144 gen_op_dec_ECX[s->aflag](); \
1145 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1146 gen_op_string_jnz_sub[nz][ot](l2);\
1148 gen_op_jz_ecx[s->aflag](l2); \
1149 gen_jmp(s, cur_eip); \
1171 static GenOpFunc1
*gen_jcc_sub
[4][8] = {
1202 #ifdef TARGET_X86_64
1205 BUGGY_64(gen_op_jb_subq
),
1207 BUGGY_64(gen_op_jbe_subq
),
1210 BUGGY_64(gen_op_jl_subq
),
1211 BUGGY_64(gen_op_jle_subq
),
1215 static GenOpFunc1
*gen_op_loop
[3][4] = {
1226 #ifdef TARGET_X86_64
1235 static GenOpFunc
*gen_setcc_slow
[8] = {
1246 static GenOpFunc
*gen_setcc_sub
[4][8] = {
1249 gen_op_setb_T0_subb
,
1250 gen_op_setz_T0_subb
,
1251 gen_op_setbe_T0_subb
,
1252 gen_op_sets_T0_subb
,
1254 gen_op_setl_T0_subb
,
1255 gen_op_setle_T0_subb
,
1259 gen_op_setb_T0_subw
,
1260 gen_op_setz_T0_subw
,
1261 gen_op_setbe_T0_subw
,
1262 gen_op_sets_T0_subw
,
1264 gen_op_setl_T0_subw
,
1265 gen_op_setle_T0_subw
,
1269 gen_op_setb_T0_subl
,
1270 gen_op_setz_T0_subl
,
1271 gen_op_setbe_T0_subl
,
1272 gen_op_sets_T0_subl
,
1274 gen_op_setl_T0_subl
,
1275 gen_op_setle_T0_subl
,
1277 #ifdef TARGET_X86_64
1280 gen_op_setb_T0_subq
,
1281 gen_op_setz_T0_subq
,
1282 gen_op_setbe_T0_subq
,
1283 gen_op_sets_T0_subq
,
1285 gen_op_setl_T0_subq
,
1286 gen_op_setle_T0_subq
,
1291 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1292 gen_op_fadd_ST0_FT0
,
1293 gen_op_fmul_ST0_FT0
,
1294 gen_op_fcom_ST0_FT0
,
1295 gen_op_fcom_ST0_FT0
,
1296 gen_op_fsub_ST0_FT0
,
1297 gen_op_fsubr_ST0_FT0
,
1298 gen_op_fdiv_ST0_FT0
,
1299 gen_op_fdivr_ST0_FT0
,
1302 /* NOTE the exception in "r" op ordering */
1303 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1304 gen_op_fadd_STN_ST0
,
1305 gen_op_fmul_STN_ST0
,
1308 gen_op_fsubr_STN_ST0
,
1309 gen_op_fsub_STN_ST0
,
1310 gen_op_fdivr_STN_ST0
,
1311 gen_op_fdiv_STN_ST0
,
1314 /* if d == OR_TMP0, it means memory operand (address in A0) */
1315 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1317 GenOpFunc
*gen_update_cc
;
1320 gen_op_mov_TN_reg
[ot
][0][d
]();
1322 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1327 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1328 gen_op_set_cc_op(s1
->cc_op
);
1330 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1331 gen_op_mov_reg_T0
[ot
][d
]();
1333 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1335 s1
->cc_op
= CC_OP_DYNAMIC
;
1338 gen_op_addl_T0_T1();
1339 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1340 gen_update_cc
= gen_op_update2_cc
;
1343 gen_op_subl_T0_T1();
1344 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1345 gen_update_cc
= gen_op_update2_cc
;
1351 gen_op_arith_T0_T1_cc
[op
]();
1352 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1353 gen_update_cc
= gen_op_update1_cc
;
1356 gen_op_cmpl_T0_T1_cc();
1357 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1358 gen_update_cc
= NULL
;
1361 if (op
!= OP_CMPL
) {
1363 gen_op_mov_reg_T0
[ot
][d
]();
1365 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1367 /* the flags update must happen after the memory write (precise
1368 exception support) */
1374 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Generate an INC/DEC of operand 'd' at size 'ot'.  Per the file's
   convention, d == OR_TMP0 means a memory operand whose address is
   already in A0.  'c' presumably selects increment vs decrement
   (the branch lines using it are elided) — TODO confirm.  The lazy
   cc_op is switched to CC_OP_INCB/CC_OP_DECB + ot so flags can be
   recomputed on demand; gen_op_update_inc_cc records the operand. */
1375 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
/* register operand: T0 := reg[d] */
1378 gen_op_mov_TN_reg
[ot
][0][d
]();
/* memory operand: T0 := [A0] */
1380 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
/* flush pending flags: INC/DEC preserve CF, so the previous
   carry must be materialized first */
1381 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1382 gen_op_set_cc_op(s1
->cc_op
);
1385 s1
->cc_op
= CC_OP_INCB
+ ot
;
1388 s1
->cc_op
= CC_OP_DECB
+ ot
;
/* write the result back to register or memory */
1391 gen_op_mov_reg_T0
[ot
][d
]();
1393 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1394 gen_op_update_inc_cc();
/* Generate a shift/rotate op 'op' on destination 'd' with count in
   operand 's' (register index, or OR_TMP1 for an immediate loaded
   by gen_shifti).  d == OR_TMP0 means memory operand with address
   in A0.  Because a zero count leaves flags unchanged, the flag
   state after the op cannot be predicted statically — cc_op is set
   to CC_OP_DYNAMIC.  NOTE(review): the if/else lines choosing the
   register vs memory table are elided in this extract. */
1397 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
/* T0 := destination value (register path) */
1400 gen_op_mov_TN_reg
[ot
][0][d
]();
/* T0 := destination value (memory path) */
1402 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
/* T1 := shift count */
1404 gen_op_mov_TN_reg
[ot
][1][s
]();
1405 /* for zero counts, flags are not updated, so must do it dynamically */
1406 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1407 gen_op_set_cc_op(s1
->cc_op
);
1410 gen_op_shift_T0_T1_cc
[ot
][op
]();
1412 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
/* write back for the register-destination case */
1414 gen_op_mov_reg_T0
[ot
][d
]();
1415 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
/* Shift with an immediate count: load the constant 'c' into T1 and
   delegate to gen_shift with the OR_TMP1 pseudo-register as the
   count operand. */
1418 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1420 /* currently not optimized */
1421 gen_op_movl_T1_im(c
);
1422 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1425 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1433 int mod
, rm
, code
, override
, must_add_seg
;
1435 override
= s
->override
;
1436 must_add_seg
= s
->addseg
;
1439 mod
= (modrm
>> 6) & 3;
1451 code
= ldub_code(s
->pc
++);
1452 scale
= (code
>> 6) & 3;
1453 index
= ((code
>> 3) & 7) | REX_X(s
);
1460 if ((base
& 7) == 5) {
1462 disp
= (int32_t)ldl_code(s
->pc
);
1464 if (CODE64(s
) && !havesib
) {
1465 disp
+= s
->pc
+ s
->rip_offset
;
1472 disp
= (int8_t)ldub_code(s
->pc
++);
1476 disp
= ldl_code(s
->pc
);
1482 /* for correct popl handling with esp */
1483 if (base
== 4 && s
->popl_esp_hack
)
1484 disp
+= s
->popl_esp_hack
;
1485 #ifdef TARGET_X86_64
1486 if (s
->aflag
== 2) {
1487 gen_op_movq_A0_reg
[base
]();
1489 if ((int32_t)disp
== disp
)
1490 gen_op_addq_A0_im(disp
);
1492 gen_op_addq_A0_im64(disp
>> 32, disp
);
1497 gen_op_movl_A0_reg
[base
]();
1499 gen_op_addl_A0_im(disp
);
1502 #ifdef TARGET_X86_64
1503 if (s
->aflag
== 2) {
1504 if ((int32_t)disp
== disp
)
1505 gen_op_movq_A0_im(disp
);
1507 gen_op_movq_A0_im64(disp
>> 32, disp
);
1511 gen_op_movl_A0_im(disp
);
1514 /* XXX: index == 4 is always invalid */
1515 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1516 #ifdef TARGET_X86_64
1517 if (s
->aflag
== 2) {
1518 gen_op_addq_A0_reg_sN
[scale
][index
]();
1522 gen_op_addl_A0_reg_sN
[scale
][index
]();
1527 if (base
== R_EBP
|| base
== R_ESP
)
1532 #ifdef TARGET_X86_64
1533 if (s
->aflag
== 2) {
1534 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1538 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1545 disp
= lduw_code(s
->pc
);
1547 gen_op_movl_A0_im(disp
);
1548 rm
= 0; /* avoid SS override */
1555 disp
= (int8_t)ldub_code(s
->pc
++);
1559 disp
= lduw_code(s
->pc
);
1565 gen_op_movl_A0_reg
[R_EBX
]();
1566 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1569 gen_op_movl_A0_reg
[R_EBX
]();
1570 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1573 gen_op_movl_A0_reg
[R_EBP
]();
1574 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1577 gen_op_movl_A0_reg
[R_EBP
]();
1578 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1581 gen_op_movl_A0_reg
[R_ESI
]();
1584 gen_op_movl_A0_reg
[R_EDI
]();
1587 gen_op_movl_A0_reg
[R_EBP
]();
1591 gen_op_movl_A0_reg
[R_EBX
]();
1595 gen_op_addl_A0_im(disp
);
1596 gen_op_andl_A0_ffff();
1600 if (rm
== 2 || rm
== 3 || rm
== 6)
1605 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1615 /* used for LEA and MOV AX, mem */
/* Add the appropriate data-segment base to A0 (used for LEA-like
   address formation and MOV AX,mem).  An explicit override segment
   wins; otherwise the default is presumably DS — the lines setting
   the default and testing must_add_seg are elided, TODO confirm. */
1616 static void gen_add_A0_ds_seg(DisasContext
*s
)
1618 int override
, must_add_seg
;
1619 must_add_seg
= s
->addseg
;
1621 if (s
->override
>= 0) {
1622 override
= s
->override
;
1628 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1632 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1634 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1636 int mod
, rm
, opreg
, disp
;
1638 mod
= (modrm
>> 6) & 3;
1639 rm
= (modrm
& 7) | REX_B(s
);
1643 gen_op_mov_TN_reg
[ot
][0][reg
]();
1644 gen_op_mov_reg_T0
[ot
][rm
]();
1646 gen_op_mov_TN_reg
[ot
][0][rm
]();
1648 gen_op_mov_reg_T0
[ot
][reg
]();
1651 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1654 gen_op_mov_TN_reg
[ot
][0][reg
]();
1655 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1657 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1659 gen_op_mov_reg_T0
[ot
][reg
]();
1664 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1670 ret
= ldub_code(s
->pc
);
1674 ret
= lduw_code(s
->pc
);
1679 ret
= ldl_code(s
->pc
);
1686 static inline int insn_const_size(unsigned int ot
)
1694 static inline void gen_jcc(DisasContext
*s
, int b
,
1695 target_ulong val
, target_ulong next_eip
)
1697 TranslationBlock
*tb
;
1704 jcc_op
= (b
>> 1) & 7;
1708 /* we optimize the cmp/jcc case */
1713 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1716 /* some jumps are easy to compute */
1758 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1761 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1773 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1774 gen_op_set_cc_op(s
->cc_op
);
1777 gen_setcc_slow
[jcc_op
]();
1778 func
= gen_op_jnz_T0_label
;
1788 l1
= gen_new_label();
1791 gen_op_goto_tb0(TBPARAM(tb
));
1792 gen_jmp_im(next_eip
);
1793 gen_op_movl_T0_im((long)tb
+ 0);
1797 gen_op_goto_tb1(TBPARAM(tb
));
1799 gen_op_movl_T0_im((long)tb
+ 1);
1805 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1806 gen_op_set_cc_op(s
->cc_op
);
1807 s
->cc_op
= CC_OP_DYNAMIC
;
1809 gen_setcc_slow
[jcc_op
]();
1815 l1
= gen_new_label();
1816 l2
= gen_new_label();
1817 gen_op_jnz_T0_label(l1
);
1818 gen_jmp_im(next_eip
);
1819 gen_op_jmp_label(l2
);
1827 static void gen_setcc(DisasContext
*s
, int b
)
1833 jcc_op
= (b
>> 1) & 7;
1835 /* we optimize the cmp/jcc case */
1840 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1845 /* some jumps are easy to compute */
1872 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1875 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1883 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1884 gen_op_set_cc_op(s
->cc_op
);
1885 func
= gen_setcc_slow
[jcc_op
];
1894 /* move T0 to seg_reg and compute if the CPU state may change. Never
1895 call this function with seg_reg == R_CS */
1896 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
1898 if (s
->pe
&& !s
->vm86
) {
1899 /* XXX: optimize by finding processor state dynamically */
1900 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1901 gen_op_set_cc_op(s
->cc_op
);
1902 gen_jmp_im(cur_eip
);
1903 gen_op_movl_seg_T0(seg_reg
);
1904 /* abort translation because the addseg value may change or
1905 because ss32 may change. For R_SS, translation must always
1906 stop as a special handling must be done to disable hardware
1907 interrupts for the next instruction */
1908 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
1911 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1912 if (seg_reg
== R_SS
)
1917 static inline void gen_stack_update(DisasContext
*s
, int addend
)
1919 #ifdef TARGET_X86_64
1922 gen_op_addq_ESP_8();
1924 gen_op_addq_ESP_im(addend
);
1929 gen_op_addl_ESP_2();
1930 else if (addend
== 4)
1931 gen_op_addl_ESP_4();
1933 gen_op_addl_ESP_im(addend
);
1936 gen_op_addw_ESP_2();
1937 else if (addend
== 4)
1938 gen_op_addw_ESP_4();
1940 gen_op_addw_ESP_im(addend
);
1944 /* generate a push. It depends on ss32, addseg and dflag */
1945 static void gen_push_T0(DisasContext
*s
)
1947 #ifdef TARGET_X86_64
1949 /* XXX: check 16 bit behaviour */
1950 gen_op_movq_A0_reg
[R_ESP
]();
1952 gen_op_st_T0_A0
[OT_QUAD
+ s
->mem_index
]();
1953 gen_op_movq_ESP_A0();
1957 gen_op_movl_A0_reg
[R_ESP
]();
1964 gen_op_movl_T1_A0();
1965 gen_op_addl_A0_SS();
1968 gen_op_andl_A0_ffff();
1969 gen_op_movl_T1_A0();
1970 gen_op_addl_A0_SS();
1972 gen_op_st_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
1973 if (s
->ss32
&& !s
->addseg
)
1974 gen_op_movl_ESP_A0();
1976 gen_op_mov_reg_T1
[s
->ss32
+ 1][R_ESP
]();
1980 /* generate a push. It depends on ss32, addseg and dflag */
1981 /* slower version for T1, only used for call Ev */
1982 static void gen_push_T1(DisasContext
*s
)
1984 #ifdef TARGET_X86_64
1986 /* XXX: check 16 bit behaviour */
1987 gen_op_movq_A0_reg
[R_ESP
]();
1989 gen_op_st_T1_A0
[OT_QUAD
+ s
->mem_index
]();
1990 gen_op_movq_ESP_A0();
1994 gen_op_movl_A0_reg
[R_ESP
]();
2001 gen_op_addl_A0_SS();
2004 gen_op_andl_A0_ffff();
2005 gen_op_addl_A0_SS();
2007 gen_op_st_T1_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2009 if (s
->ss32
&& !s
->addseg
)
2010 gen_op_movl_ESP_A0();
2012 gen_stack_update(s
, (-2) << s
->dflag
);
2016 /* two step pop is necessary for precise exceptions */
2017 static void gen_pop_T0(DisasContext
*s
)
2019 #ifdef TARGET_X86_64
2021 /* XXX: check 16 bit behaviour */
2022 gen_op_movq_A0_reg
[R_ESP
]();
2023 gen_op_ld_T0_A0
[OT_QUAD
+ s
->mem_index
]();
2027 gen_op_movl_A0_reg
[R_ESP
]();
2030 gen_op_addl_A0_SS();
2032 gen_op_andl_A0_ffff();
2033 gen_op_addl_A0_SS();
2035 gen_op_ld_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
/* Second half of the two-step pop (the load was done by gen_pop_T0
   for precise exceptions): advance ESP by the operand size —
   8 bytes on the 64-bit path, otherwise 2 << dflag (2 or 4).
   NOTE(review): the condition selecting the 64-bit path and the
   matching #else/#endif are elided in this extract. */
2039 static void gen_pop_update(DisasContext
*s
)
2041 #ifdef TARGET_X86_64
2043 gen_stack_update(s
, 8);
2047 gen_stack_update(s
, 2 << s
->dflag
);
/* Form a stack address in A0 from ESP: mask to 16 bits for a
   16-bit stack segment, save the unsegmented offset in T1, and add
   the SS base.  NOTE(review): the ss32 conditionals gating the mask
   and segment-add lines are elided in this extract. */
2051 static void gen_stack_A0(DisasContext
*s
)
2053 gen_op_movl_A0_ESP();
2055 gen_op_andl_A0_ffff();
/* keep the raw offset for a later ESP write-back */
2056 gen_op_movl_T1_A0();
2058 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2061 /* NOTE: wrap around in 16 bit not fully handled */
2062 static void gen_pusha(DisasContext
*s
)
2065 gen_op_movl_A0_ESP();
2066 gen_op_addl_A0_im(-16 << s
->dflag
);
2068 gen_op_andl_A0_ffff();
2069 gen_op_movl_T1_A0();
2071 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2072 for(i
= 0;i
< 8; i
++) {
2073 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
2074 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2075 gen_op_addl_A0_im(2 << s
->dflag
);
2077 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2080 /* NOTE: wrap around in 16 bit not fully handled */
2081 static void gen_popa(DisasContext
*s
)
2084 gen_op_movl_A0_ESP();
2086 gen_op_andl_A0_ffff();
2087 gen_op_movl_T1_A0();
2088 gen_op_addl_T1_im(16 << s
->dflag
);
2090 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2091 for(i
= 0;i
< 8; i
++) {
2092 /* ESP is not reloaded */
2094 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2095 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
2097 gen_op_addl_A0_im(2 << s
->dflag
);
2099 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2102 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2106 ot
= s
->dflag
+ OT_WORD
;
2108 opsize
= 2 << s
->dflag
;
2110 gen_op_movl_A0_ESP();
2111 gen_op_addl_A0_im(-opsize
);
2113 gen_op_andl_A0_ffff();
2114 gen_op_movl_T1_A0();
2116 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2118 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2119 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2121 gen_op_enter_level(level
, s
->dflag
);
2123 gen_op_mov_reg_T1
[ot
][R_EBP
]();
2124 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2125 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2128 static void gen_exception(DisasContext
*s
, int trapno
, target_ulong cur_eip
)
2130 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2131 gen_op_set_cc_op(s
->cc_op
);
2132 gen_jmp_im(cur_eip
);
2133 gen_op_raise_exception(trapno
);
2137 /* an interrupt is different from an exception because of the
2138 priviledge checks */
2139 static void gen_interrupt(DisasContext
*s
, int intno
,
2140 target_ulong cur_eip
, target_ulong next_eip
)
2142 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2143 gen_op_set_cc_op(s
->cc_op
);
2144 gen_jmp_im(cur_eip
);
2145 gen_op_raise_interrupt(intno
, (int)(next_eip
- cur_eip
));
2149 static void gen_debug(DisasContext
*s
, target_ulong cur_eip
)
2151 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2152 gen_op_set_cc_op(s
->cc_op
);
2153 gen_jmp_im(cur_eip
);
2158 /* generate a generic end of block. Trace exception is also generated
2160 static void gen_eob(DisasContext
*s
)
2162 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2163 gen_op_set_cc_op(s
->cc_op
);
2164 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
2165 gen_op_reset_inhibit_irq();
2167 if (s
->singlestep_enabled
) {
2170 gen_op_raise_exception(EXCP01_SSTP
);
2178 /* generate a jump to eip. No segment change must happen before as a
2179 direct call to the next block may occur */
2180 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
2182 TranslationBlock
*tb
= s
->tb
;
2185 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2186 gen_op_set_cc_op(s
->cc_op
);
2188 gen_op_goto_tb1(TBPARAM(tb
));
2190 gen_op_goto_tb0(TBPARAM(tb
));
2192 gen_op_movl_T0_im((long)tb
+ tb_num
);
2201 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2203 gen_jmp_tb(s
, eip
, 0);
2206 static void gen_movtl_T0_im(target_ulong val
)
2208 #ifdef TARGET_X86_64
2209 if ((int32_t)val
== val
) {
2210 gen_op_movl_T0_im(val
);
2212 gen_op_movq_T0_im64(val
>> 32, val
);
2215 gen_op_movl_T0_im(val
);
2219 static void gen_movtl_T1_im(target_ulong val
)
2221 #ifdef TARGET_X86_64
2222 if ((int32_t)val
== val
) {
2223 gen_op_movl_T1_im(val
);
2225 gen_op_movq_T1_im64(val
>> 32, val
);
2228 gen_op_movl_T1_im(val
);
2232 static GenOpFunc1
*gen_ldq_env_A0
[3] = {
2233 gen_op_ldq_raw_env_A0
,
2234 #ifndef CONFIG_USER_ONLY
2235 gen_op_ldq_kernel_env_A0
,
2236 gen_op_ldq_user_env_A0
,
2240 static GenOpFunc1
*gen_stq_env_A0
[3] = {
2241 gen_op_stq_raw_env_A0
,
2242 #ifndef CONFIG_USER_ONLY
2243 gen_op_stq_kernel_env_A0
,
2244 gen_op_stq_user_env_A0
,
2248 static GenOpFunc1
*gen_ldo_env_A0
[3] = {
2249 gen_op_ldo_raw_env_A0
,
2250 #ifndef CONFIG_USER_ONLY
2251 gen_op_ldo_kernel_env_A0
,
2252 gen_op_ldo_user_env_A0
,
2256 static GenOpFunc1
*gen_sto_env_A0
[3] = {
2257 gen_op_sto_raw_env_A0
,
2258 #ifndef CONFIG_USER_ONLY
2259 gen_op_sto_kernel_env_A0
,
2260 gen_op_sto_user_env_A0
,
2264 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2266 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2267 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2268 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2270 static GenOpFunc2
*sse_op_table1
[256][4] = {
2271 /* pure SSE operations */
2272 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2273 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2274 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2275 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2276 [0x14] = { gen_op_punpckldq_xmm
, gen_op_punpcklqdq_xmm
},
2277 [0x15] = { gen_op_punpckhdq_xmm
, gen_op_punpckhqdq_xmm
},
2278 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2279 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2281 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2282 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2283 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2284 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2285 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2286 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2287 [0x2e] = { gen_op_ucomiss
, gen_op_ucomisd
},
2288 [0x2f] = { gen_op_comiss
, gen_op_comisd
},
2289 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2290 [0x51] = SSE_FOP(sqrt
),
2291 [0x52] = { gen_op_rsqrtps
, NULL
, gen_op_rsqrtss
, NULL
},
2292 [0x53] = { gen_op_rcpps
, NULL
, gen_op_rcpss
, NULL
},
2293 [0x54] = { gen_op_pand_xmm
, gen_op_pand_xmm
}, /* andps, andpd */
2294 [0x55] = { gen_op_pandn_xmm
, gen_op_pandn_xmm
}, /* andnps, andnpd */
2295 [0x56] = { gen_op_por_xmm
, gen_op_por_xmm
}, /* orps, orpd */
2296 [0x57] = { gen_op_pxor_xmm
, gen_op_pxor_xmm
}, /* xorps, xorpd */
2297 [0x58] = SSE_FOP(add
),
2298 [0x59] = SSE_FOP(mul
),
2299 [0x5a] = { gen_op_cvtps2pd
, gen_op_cvtpd2ps
,
2300 gen_op_cvtss2sd
, gen_op_cvtsd2ss
},
2301 [0x5b] = { gen_op_cvtdq2ps
, gen_op_cvtps2dq
, gen_op_cvttps2dq
},
2302 [0x5c] = SSE_FOP(sub
),
2303 [0x5d] = SSE_FOP(min
),
2304 [0x5e] = SSE_FOP(div
),
2305 [0x5f] = SSE_FOP(max
),
2307 [0xc2] = SSE_FOP(cmpeq
),
2308 [0xc6] = { (GenOpFunc2
*)gen_op_shufps
, (GenOpFunc2
*)gen_op_shufpd
},
2310 /* MMX ops and their SSE extensions */
2311 [0x60] = MMX_OP2(punpcklbw
),
2312 [0x61] = MMX_OP2(punpcklwd
),
2313 [0x62] = MMX_OP2(punpckldq
),
2314 [0x63] = MMX_OP2(packsswb
),
2315 [0x64] = MMX_OP2(pcmpgtb
),
2316 [0x65] = MMX_OP2(pcmpgtw
),
2317 [0x66] = MMX_OP2(pcmpgtl
),
2318 [0x67] = MMX_OP2(packuswb
),
2319 [0x68] = MMX_OP2(punpckhbw
),
2320 [0x69] = MMX_OP2(punpckhwd
),
2321 [0x6a] = MMX_OP2(punpckhdq
),
2322 [0x6b] = MMX_OP2(packssdw
),
2323 [0x6c] = { NULL
, gen_op_punpcklqdq_xmm
},
2324 [0x6d] = { NULL
, gen_op_punpckhqdq_xmm
},
2325 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
2326 [0x6f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, , movqdu */
2327 [0x70] = { (GenOpFunc2
*)gen_op_pshufw_mmx
,
2328 (GenOpFunc2
*)gen_op_pshufd_xmm
,
2329 (GenOpFunc2
*)gen_op_pshufhw_xmm
,
2330 (GenOpFunc2
*)gen_op_pshuflw_xmm
},
2331 [0x71] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftw */
2332 [0x72] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftd */
2333 [0x73] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftq */
2334 [0x74] = MMX_OP2(pcmpeqb
),
2335 [0x75] = MMX_OP2(pcmpeqw
),
2336 [0x76] = MMX_OP2(pcmpeql
),
2337 [0x77] = { SSE_SPECIAL
}, /* emms */
2338 [0x7c] = { NULL
, gen_op_haddpd
, NULL
, gen_op_haddps
},
2339 [0x7d] = { NULL
, gen_op_hsubpd
, NULL
, gen_op_hsubps
},
2340 [0x7e] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movd, movd, , movq */
2341 [0x7f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, movdqu */
2342 [0xc4] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pinsrw */
2343 [0xc5] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pextrw */
2344 [0xd0] = { NULL
, gen_op_addsubpd
, NULL
, gen_op_addsubps
},
2345 [0xd1] = MMX_OP2(psrlw
),
2346 [0xd2] = MMX_OP2(psrld
),
2347 [0xd3] = MMX_OP2(psrlq
),
2348 [0xd4] = MMX_OP2(paddq
),
2349 [0xd5] = MMX_OP2(pmullw
),
2350 [0xd6] = { NULL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
},
2351 [0xd7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pmovmskb */
2352 [0xd8] = MMX_OP2(psubusb
),
2353 [0xd9] = MMX_OP2(psubusw
),
2354 [0xda] = MMX_OP2(pminub
),
2355 [0xdb] = MMX_OP2(pand
),
2356 [0xdc] = MMX_OP2(paddusb
),
2357 [0xdd] = MMX_OP2(paddusw
),
2358 [0xde] = MMX_OP2(pmaxub
),
2359 [0xdf] = MMX_OP2(pandn
),
2360 [0xe0] = MMX_OP2(pavgb
),
2361 [0xe1] = MMX_OP2(psraw
),
2362 [0xe2] = MMX_OP2(psrad
),
2363 [0xe3] = MMX_OP2(pavgw
),
2364 [0xe4] = MMX_OP2(pmulhuw
),
2365 [0xe5] = MMX_OP2(pmulhw
),
2366 [0xe6] = { NULL
, gen_op_cvttpd2dq
, gen_op_cvtdq2pd
, gen_op_cvtpd2dq
},
2367 [0xe7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntq, movntq */
2368 [0xe8] = MMX_OP2(psubsb
),
2369 [0xe9] = MMX_OP2(psubsw
),
2370 [0xea] = MMX_OP2(pminsw
),
2371 [0xeb] = MMX_OP2(por
),
2372 [0xec] = MMX_OP2(paddsb
),
2373 [0xed] = MMX_OP2(paddsw
),
2374 [0xee] = MMX_OP2(pmaxsw
),
2375 [0xef] = MMX_OP2(pxor
),
2376 [0xf0] = { NULL
, NULL
, NULL
, SSE_SPECIAL
}, /* lddqu (PNI) */
2377 [0xf1] = MMX_OP2(psllw
),
2378 [0xf2] = MMX_OP2(pslld
),
2379 [0xf3] = MMX_OP2(psllq
),
2380 [0xf4] = MMX_OP2(pmuludq
),
2381 [0xf5] = MMX_OP2(pmaddwd
),
2382 [0xf6] = MMX_OP2(psadbw
),
2383 [0xf7] = MMX_OP2(maskmov
),
2384 [0xf8] = MMX_OP2(psubb
),
2385 [0xf9] = MMX_OP2(psubw
),
2386 [0xfa] = MMX_OP2(psubl
),
2387 [0xfb] = MMX_OP2(psubq
),
2388 [0xfc] = MMX_OP2(paddb
),
2389 [0xfd] = MMX_OP2(paddw
),
2390 [0xfe] = MMX_OP2(paddl
),
2393 static GenOpFunc2
*sse_op_table2
[3 * 8][2] = {
2394 [0 + 2] = MMX_OP2(psrlw
),
2395 [0 + 4] = MMX_OP2(psraw
),
2396 [0 + 6] = MMX_OP2(psllw
),
2397 [8 + 2] = MMX_OP2(psrld
),
2398 [8 + 4] = MMX_OP2(psrad
),
2399 [8 + 6] = MMX_OP2(pslld
),
2400 [16 + 2] = MMX_OP2(psrlq
),
2401 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2402 [16 + 6] = MMX_OP2(psllq
),
2403 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
2406 static GenOpFunc1
*sse_op_table3
[4 * 3] = {
2409 X86_64_ONLY(gen_op_cvtsq2ss
),
2410 X86_64_ONLY(gen_op_cvtsq2sd
),
2414 X86_64_ONLY(gen_op_cvttss2sq
),
2415 X86_64_ONLY(gen_op_cvttsd2sq
),
2419 X86_64_ONLY(gen_op_cvtss2sq
),
2420 X86_64_ONLY(gen_op_cvtsd2sq
),
2423 static GenOpFunc2
*sse_op_table4
[8][4] = {
2434 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2436 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2437 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2438 GenOpFunc2
*sse_op2
;
2439 GenOpFunc3
*sse_op3
;
2442 if (s
->prefix
& PREFIX_DATA
)
2444 else if (s
->prefix
& PREFIX_REPZ
)
2446 else if (s
->prefix
& PREFIX_REPNZ
)
2450 sse_op2
= sse_op_table1
[b
][b1
];
2453 if (b
<= 0x5f || b
== 0xc6 || b
== 0xc2) {
2463 /* simple MMX/SSE operation */
2464 if (s
->flags
& HF_TS_MASK
) {
2465 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2468 if (s
->flags
& HF_EM_MASK
) {
2470 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2473 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2480 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2481 the static cpu state) */
2486 modrm
= ldub_code(s
->pc
++);
2487 reg
= ((modrm
>> 3) & 7);
2490 mod
= (modrm
>> 6) & 3;
2491 if (sse_op2
== SSE_SPECIAL
) {
2494 case 0x0e7: /* movntq */
2497 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2498 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2500 case 0x1e7: /* movntdq */
2501 case 0x02b: /* movntps */
2502 case 0x12b: /* movntps */
2503 case 0x2f0: /* lddqu */
2506 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2507 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2509 case 0x6e: /* movd mm, ea */
2510 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2511 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2513 case 0x16e: /* movd xmm, ea */
2514 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2515 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2517 case 0x6f: /* movq mm, ea */
2519 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2520 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2523 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
].mmx
),
2524 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2527 case 0x010: /* movups */
2528 case 0x110: /* movupd */
2529 case 0x028: /* movaps */
2530 case 0x128: /* movapd */
2531 case 0x16f: /* movdqa xmm, ea */
2532 case 0x26f: /* movdqu xmm, ea */
2534 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2535 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2537 rm
= (modrm
& 7) | REX_B(s
);
2538 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
2539 offsetof(CPUX86State
,xmm_regs
[rm
]));
2542 case 0x210: /* movss xmm, ea */
2544 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2545 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2546 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2548 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2549 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2550 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2552 rm
= (modrm
& 7) | REX_B(s
);
2553 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2554 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2557 case 0x310: /* movsd xmm, ea */
2559 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2560 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2562 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2563 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2565 rm
= (modrm
& 7) | REX_B(s
);
2566 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2567 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2570 case 0x012: /* movlps */
2571 case 0x112: /* movlpd */
2573 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2574 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2577 rm
= (modrm
& 7) | REX_B(s
);
2578 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2579 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2582 case 0x016: /* movhps */
2583 case 0x116: /* movhpd */
2585 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2586 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2589 rm
= (modrm
& 7) | REX_B(s
);
2590 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2591 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2594 case 0x216: /* movshdup */
2596 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2597 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2599 rm
= (modrm
& 7) | REX_B(s
);
2600 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2601 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
2602 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2603 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
2605 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2606 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2607 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2608 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2610 case 0x7e: /* movd ea, mm */
2611 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2612 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2614 case 0x17e: /* movd ea, xmm */
2615 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2616 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2618 case 0x27e: /* movq xmm, ea */
2620 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2621 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2623 rm
= (modrm
& 7) | REX_B(s
);
2624 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2625 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2627 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2629 case 0x7f: /* movq ea, mm */
2631 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2632 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2635 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2636 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2639 case 0x011: /* movups */
2640 case 0x111: /* movupd */
2641 case 0x029: /* movaps */
2642 case 0x129: /* movapd */
2643 case 0x17f: /* movdqa ea, xmm */
2644 case 0x27f: /* movdqu ea, xmm */
2646 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2647 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2649 rm
= (modrm
& 7) | REX_B(s
);
2650 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2651 offsetof(CPUX86State
,xmm_regs
[reg
]));
2654 case 0x211: /* movss ea, xmm */
2656 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2657 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2658 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
2660 rm
= (modrm
& 7) | REX_B(s
);
2661 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2662 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2665 case 0x311: /* movsd ea, xmm */
2667 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2668 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2670 rm
= (modrm
& 7) | REX_B(s
);
2671 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2672 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2675 case 0x013: /* movlps */
2676 case 0x113: /* movlpd */
2678 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2679 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2684 case 0x017: /* movhps */
2685 case 0x117: /* movhpd */
2687 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2688 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2693 case 0x71: /* shift mm, im */
2696 case 0x171: /* shift xmm, im */
2699 val
= ldub_code(s
->pc
++);
2701 gen_op_movl_T0_im(val
);
2702 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2704 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2705 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2707 gen_op_movl_T0_im(val
);
2708 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2710 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2711 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2713 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2717 rm
= (modrm
& 7) | REX_B(s
);
2718 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2721 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2723 sse_op2(op2_offset
, op1_offset
);
2725 case 0x050: /* movmskps */
2726 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[reg
]));
2727 rm
= (modrm
& 7) | REX_B(s
);
2728 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
2730 case 0x150: /* movmskpd */
2731 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[reg
]));
2732 rm
= (modrm
& 7) | REX_B(s
);
2733 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
2735 case 0x02a: /* cvtpi2ps */
2736 case 0x12a: /* cvtpi2pd */
2739 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2740 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2741 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2744 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2746 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2749 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
2753 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
2757 case 0x22a: /* cvtsi2ss */
2758 case 0x32a: /* cvtsi2sd */
2759 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2760 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2761 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2762 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
2764 case 0x02c: /* cvttps2pi */
2765 case 0x12c: /* cvttpd2pi */
2766 case 0x02d: /* cvtps2pi */
2767 case 0x12d: /* cvtpd2pi */
2770 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2771 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2772 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2774 rm
= (modrm
& 7) | REX_B(s
);
2775 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2777 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
2780 gen_op_cvttps2pi(op1_offset
, op2_offset
);
2783 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
2786 gen_op_cvtps2pi(op1_offset
, op2_offset
);
2789 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
2793 case 0x22c: /* cvttss2si */
2794 case 0x32c: /* cvttsd2si */
2795 case 0x22d: /* cvtss2si */
2796 case 0x32d: /* cvtsd2si */
2797 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2798 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2799 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
2800 (b
& 1) * 4](op1_offset
);
2801 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2803 case 0xc4: /* pinsrw */
2805 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2806 val
= ldub_code(s
->pc
++);
2809 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
2812 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
2815 case 0xc5: /* pextrw */
2819 val
= ldub_code(s
->pc
++);
2822 rm
= (modrm
& 7) | REX_B(s
);
2823 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
2827 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
2829 reg
= ((modrm
>> 3) & 7) | rex_r
;
2830 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2832 case 0x1d6: /* movq ea, xmm */
2834 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2835 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2837 rm
= (modrm
& 7) | REX_B(s
);
2838 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2839 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2840 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2843 case 0x2d6: /* movq2dq */
2845 rm
= (modrm
& 7) | REX_B(s
);
2846 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2847 offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
));
2848 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2850 case 0x3d6: /* movdq2q */
2853 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2854 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2856 case 0xd7: /* pmovmskb */
2861 rm
= (modrm
& 7) | REX_B(s
);
2862 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
2865 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2867 reg
= ((modrm
>> 3) & 7) | rex_r
;
2868 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2874 /* generic MMX or SSE operation */
2876 /* maskmov : we must prepare A0 */
2879 #ifdef TARGET_X86_64
2881 gen_op_movq_A0_reg
[R_EDI
]();
2885 gen_op_movl_A0_reg
[R_EDI
]();
2887 gen_op_andl_A0_ffff();
2889 gen_add_A0_ds_seg(s
);
2892 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2894 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2895 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2896 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f) ||
2898 /* specific case for SSE single instructions */
2901 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2902 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2905 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
2908 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2911 rm
= (modrm
& 7) | REX_B(s
);
2912 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2915 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
2917 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2918 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2919 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2922 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2926 case 0x70: /* pshufx insn */
2927 case 0xc6: /* pshufx insn */
2928 val
= ldub_code(s
->pc
++);
2929 sse_op3
= (GenOpFunc3
*)sse_op2
;
2930 sse_op3(op1_offset
, op2_offset
, val
);
2934 val
= ldub_code(s
->pc
++);
2937 sse_op2
= sse_op_table4
[val
][b1
];
2938 sse_op2(op1_offset
, op2_offset
);
2941 sse_op2(op1_offset
, op2_offset
);
2944 if (b
== 0x2e || b
== 0x2f) {
2945 s
->cc_op
= CC_OP_EFLAGS
;
2951 /* convert one instruction. s->is_jmp is set if the translation must
2952 be stopped. Return the next pc value */
2953 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
2955 int b
, prefixes
, aflag
, dflag
;
2957 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
2958 target_ulong next_eip
, tval
;
2968 #ifdef TARGET_X86_64
2973 s
->rip_offset
= 0; /* for relative ip address */
2975 b
= ldub_code(s
->pc
);
2977 /* check prefixes */
2978 #ifdef TARGET_X86_64
2982 prefixes
|= PREFIX_REPZ
;
2985 prefixes
|= PREFIX_REPNZ
;
2988 prefixes
|= PREFIX_LOCK
;
3009 prefixes
|= PREFIX_DATA
;
3012 prefixes
|= PREFIX_ADR
;
3016 rex_w
= (b
>> 3) & 1;
3017 rex_r
= (b
& 0x4) << 1;
3018 s
->rex_x
= (b
& 0x2) << 2;
3019 REX_B(s
) = (b
& 0x1) << 3;
3020 x86_64_hregs
= 1; /* select uniform byte register addressing */
3024 /* 0x66 is ignored if rex.w is set */
3027 if (prefixes
& PREFIX_DATA
)
3030 if (!(prefixes
& PREFIX_ADR
))
3037 prefixes
|= PREFIX_REPZ
;
3040 prefixes
|= PREFIX_REPNZ
;
3043 prefixes
|= PREFIX_LOCK
;
3064 prefixes
|= PREFIX_DATA
;
3067 prefixes
|= PREFIX_ADR
;
3070 if (prefixes
& PREFIX_DATA
)
3072 if (prefixes
& PREFIX_ADR
)
3076 s
->prefix
= prefixes
;
3080 /* lock generation */
3081 if (prefixes
& PREFIX_LOCK
)
3084 /* now check op code */
3088 /**************************/
3089 /* extended op code */
3090 b
= ldub_code(s
->pc
++) | 0x100;
3093 /**************************/
3111 ot
= dflag
+ OT_WORD
;
3114 case 0: /* OP Ev, Gv */
3115 modrm
= ldub_code(s
->pc
++);
3116 reg
= ((modrm
>> 3) & 7) | rex_r
;
3117 mod
= (modrm
>> 6) & 3;
3118 rm
= (modrm
& 7) | REX_B(s
);
3120 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3122 } else if (op
== OP_XORL
&& rm
== reg
) {
3124 /* xor reg, reg optimisation */
3126 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3127 gen_op_mov_reg_T0
[ot
][reg
]();
3128 gen_op_update1_cc();
3133 gen_op_mov_TN_reg
[ot
][1][reg
]();
3134 gen_op(s
, op
, ot
, opreg
);
3136 case 1: /* OP Gv, Ev */
3137 modrm
= ldub_code(s
->pc
++);
3138 mod
= (modrm
>> 6) & 3;
3139 reg
= ((modrm
>> 3) & 7) | rex_r
;
3140 rm
= (modrm
& 7) | REX_B(s
);
3142 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3143 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3144 } else if (op
== OP_XORL
&& rm
== reg
) {
3147 gen_op_mov_TN_reg
[ot
][1][rm
]();
3149 gen_op(s
, op
, ot
, reg
);
3151 case 2: /* OP A, Iv */
3152 val
= insn_get(s
, ot
);
3153 gen_op_movl_T1_im(val
);
3154 gen_op(s
, op
, ot
, OR_EAX
);
3160 case 0x80: /* GRP1 */
3170 ot
= dflag
+ OT_WORD
;
3172 modrm
= ldub_code(s
->pc
++);
3173 mod
= (modrm
>> 6) & 3;
3174 rm
= (modrm
& 7) | REX_B(s
);
3175 op
= (modrm
>> 3) & 7;
3181 s
->rip_offset
= insn_const_size(ot
);
3182 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3193 val
= insn_get(s
, ot
);
3196 val
= (int8_t)insn_get(s
, OT_BYTE
);
3199 gen_op_movl_T1_im(val
);
3200 gen_op(s
, op
, ot
, opreg
);
3204 /**************************/
3205 /* inc, dec, and other misc arith */
3206 case 0x40 ... 0x47: /* inc Gv */
3207 ot
= dflag
? OT_LONG
: OT_WORD
;
3208 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3210 case 0x48 ... 0x4f: /* dec Gv */
3211 ot
= dflag
? OT_LONG
: OT_WORD
;
3212 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3214 case 0xf6: /* GRP3 */
3219 ot
= dflag
+ OT_WORD
;
3221 modrm
= ldub_code(s
->pc
++);
3222 mod
= (modrm
>> 6) & 3;
3223 rm
= (modrm
& 7) | REX_B(s
);
3224 op
= (modrm
>> 3) & 7;
3227 s
->rip_offset
= insn_const_size(ot
);
3228 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3229 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3231 gen_op_mov_TN_reg
[ot
][0][rm
]();
3236 val
= insn_get(s
, ot
);
3237 gen_op_movl_T1_im(val
);
3238 gen_op_testl_T0_T1_cc();
3239 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3244 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3246 gen_op_mov_reg_T0
[ot
][rm
]();
3252 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3254 gen_op_mov_reg_T0
[ot
][rm
]();
3256 gen_op_update_neg_cc();
3257 s
->cc_op
= CC_OP_SUBB
+ ot
;
3262 gen_op_mulb_AL_T0();
3263 s
->cc_op
= CC_OP_MULB
;
3266 gen_op_mulw_AX_T0();
3267 s
->cc_op
= CC_OP_MULW
;
3271 gen_op_mull_EAX_T0();
3272 s
->cc_op
= CC_OP_MULL
;
3274 #ifdef TARGET_X86_64
3276 gen_op_mulq_EAX_T0();
3277 s
->cc_op
= CC_OP_MULQ
;
3285 gen_op_imulb_AL_T0();
3286 s
->cc_op
= CC_OP_MULB
;
3289 gen_op_imulw_AX_T0();
3290 s
->cc_op
= CC_OP_MULW
;
3294 gen_op_imull_EAX_T0();
3295 s
->cc_op
= CC_OP_MULL
;
3297 #ifdef TARGET_X86_64
3299 gen_op_imulq_EAX_T0();
3300 s
->cc_op
= CC_OP_MULQ
;
3308 gen_jmp_im(pc_start
- s
->cs_base
);
3309 gen_op_divb_AL_T0();
3312 gen_jmp_im(pc_start
- s
->cs_base
);
3313 gen_op_divw_AX_T0();
3317 gen_jmp_im(pc_start
- s
->cs_base
);
3318 gen_op_divl_EAX_T0();
3320 #ifdef TARGET_X86_64
3322 gen_jmp_im(pc_start
- s
->cs_base
);
3323 gen_op_divq_EAX_T0();
3331 gen_jmp_im(pc_start
- s
->cs_base
);
3332 gen_op_idivb_AL_T0();
3335 gen_jmp_im(pc_start
- s
->cs_base
);
3336 gen_op_idivw_AX_T0();
3340 gen_jmp_im(pc_start
- s
->cs_base
);
3341 gen_op_idivl_EAX_T0();
3343 #ifdef TARGET_X86_64
3345 gen_jmp_im(pc_start
- s
->cs_base
);
3346 gen_op_idivq_EAX_T0();
3356 case 0xfe: /* GRP4 */
3357 case 0xff: /* GRP5 */
3361 ot
= dflag
+ OT_WORD
;
3363 modrm
= ldub_code(s
->pc
++);
3364 mod
= (modrm
>> 6) & 3;
3365 rm
= (modrm
& 7) | REX_B(s
);
3366 op
= (modrm
>> 3) & 7;
3367 if (op
>= 2 && b
== 0xfe) {
3371 if (op
>= 2 && op
<= 5) {
3372 /* operand size for jumps is 64 bit */
3374 } else if (op
== 6) {
3375 /* default push size is 64 bit */
3376 ot
= dflag
? OT_QUAD
: OT_WORD
;
3380 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3381 if (op
>= 2 && op
!= 3 && op
!= 5)
3382 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3384 gen_op_mov_TN_reg
[ot
][0][rm
]();
3388 case 0: /* inc Ev */
3393 gen_inc(s
, ot
, opreg
, 1);
3395 case 1: /* dec Ev */
3400 gen_inc(s
, ot
, opreg
, -1);
3402 case 2: /* call Ev */
3403 /* XXX: optimize if memory (no 'and' is necessary) */
3405 gen_op_andl_T0_ffff();
3406 next_eip
= s
->pc
- s
->cs_base
;
3407 gen_movtl_T1_im(next_eip
);
3412 case 3: /* lcall Ev */
3413 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3414 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
3415 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3417 if (s
->pe
&& !s
->vm86
) {
3418 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3419 gen_op_set_cc_op(s
->cc_op
);
3420 gen_jmp_im(pc_start
- s
->cs_base
);
3421 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3423 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3427 case 4: /* jmp Ev */
3429 gen_op_andl_T0_ffff();
3433 case 5: /* ljmp Ev */
3434 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3435 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
3436 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3438 if (s
->pe
&& !s
->vm86
) {
3439 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3440 gen_op_set_cc_op(s
->cc_op
);
3441 gen_jmp_im(pc_start
- s
->cs_base
);
3442 gen_op_ljmp_protected_T0_T1(s
->pc
- s
->cs_base
);
3444 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3445 gen_op_movl_T0_T1();
3450 case 6: /* push Ev */
3458 case 0x84: /* test Ev, Gv */
3463 ot
= dflag
+ OT_WORD
;
3465 modrm
= ldub_code(s
->pc
++);
3466 mod
= (modrm
>> 6) & 3;
3467 rm
= (modrm
& 7) | REX_B(s
);
3468 reg
= ((modrm
>> 3) & 7) | rex_r
;
3470 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3471 gen_op_mov_TN_reg
[ot
][1][reg
]();
3472 gen_op_testl_T0_T1_cc();
3473 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3476 case 0xa8: /* test eAX, Iv */
3481 ot
= dflag
+ OT_WORD
;
3482 val
= insn_get(s
, ot
);
3484 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
3485 gen_op_movl_T1_im(val
);
3486 gen_op_testl_T0_T1_cc();
3487 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3490 case 0x98: /* CWDE/CBW */
3491 #ifdef TARGET_X86_64
3493 gen_op_movslq_RAX_EAX();
3497 gen_op_movswl_EAX_AX();
3499 gen_op_movsbw_AX_AL();
3501 case 0x99: /* CDQ/CWD */
3502 #ifdef TARGET_X86_64
3504 gen_op_movsqo_RDX_RAX();
3508 gen_op_movslq_EDX_EAX();
3510 gen_op_movswl_DX_AX();
3512 case 0x1af: /* imul Gv, Ev */
3513 case 0x69: /* imul Gv, Ev, I */
3515 ot
= dflag
+ OT_WORD
;
3516 modrm
= ldub_code(s
->pc
++);
3517 reg
= ((modrm
>> 3) & 7) | rex_r
;
3519 s
->rip_offset
= insn_const_size(ot
);
3522 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3524 val
= insn_get(s
, ot
);
3525 gen_op_movl_T1_im(val
);
3526 } else if (b
== 0x6b) {
3527 val
= (int8_t)insn_get(s
, OT_BYTE
);
3528 gen_op_movl_T1_im(val
);
3530 gen_op_mov_TN_reg
[ot
][1][reg
]();
3533 #ifdef TARGET_X86_64
3534 if (ot
== OT_QUAD
) {
3535 gen_op_imulq_T0_T1();
3538 if (ot
== OT_LONG
) {
3539 gen_op_imull_T0_T1();
3541 gen_op_imulw_T0_T1();
3543 gen_op_mov_reg_T0
[ot
][reg
]();
3544 s
->cc_op
= CC_OP_MULB
+ ot
;
3547 case 0x1c1: /* xadd Ev, Gv */
3551 ot
= dflag
+ OT_WORD
;
3552 modrm
= ldub_code(s
->pc
++);
3553 reg
= ((modrm
>> 3) & 7) | rex_r
;
3554 mod
= (modrm
>> 6) & 3;
3556 rm
= (modrm
& 7) | REX_B(s
);
3557 gen_op_mov_TN_reg
[ot
][0][reg
]();
3558 gen_op_mov_TN_reg
[ot
][1][rm
]();
3559 gen_op_addl_T0_T1();
3560 gen_op_mov_reg_T1
[ot
][reg
]();
3561 gen_op_mov_reg_T0
[ot
][rm
]();
3563 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3564 gen_op_mov_TN_reg
[ot
][0][reg
]();
3565 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3566 gen_op_addl_T0_T1();
3567 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3568 gen_op_mov_reg_T1
[ot
][reg
]();
3570 gen_op_update2_cc();
3571 s
->cc_op
= CC_OP_ADDB
+ ot
;
3574 case 0x1b1: /* cmpxchg Ev, Gv */
3578 ot
= dflag
+ OT_WORD
;
3579 modrm
= ldub_code(s
->pc
++);
3580 reg
= ((modrm
>> 3) & 7) | rex_r
;
3581 mod
= (modrm
>> 6) & 3;
3582 gen_op_mov_TN_reg
[ot
][1][reg
]();
3584 rm
= (modrm
& 7) | REX_B(s
);
3585 gen_op_mov_TN_reg
[ot
][0][rm
]();
3586 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3587 gen_op_mov_reg_T0
[ot
][rm
]();
3589 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3590 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3591 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3593 s
->cc_op
= CC_OP_SUBB
+ ot
;
3595 case 0x1c7: /* cmpxchg8b */
3596 modrm
= ldub_code(s
->pc
++);
3597 mod
= (modrm
>> 6) & 3;
3600 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3601 gen_op_set_cc_op(s
->cc_op
);
3602 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3604 s
->cc_op
= CC_OP_EFLAGS
;
3607 /**************************/
3609 case 0x50 ... 0x57: /* push */
3610 gen_op_mov_TN_reg
[OT_LONG
][0][(b
& 7) | REX_B(s
)]();
3613 case 0x58 ... 0x5f: /* pop */
3615 ot
= dflag
? OT_QUAD
: OT_WORD
;
3617 ot
= dflag
+ OT_WORD
;
3620 /* NOTE: order is important for pop %sp */
3622 gen_op_mov_reg_T0
[ot
][(b
& 7) | REX_B(s
)]();
3624 case 0x60: /* pusha */
3629 case 0x61: /* popa */
3634 case 0x68: /* push Iv */
3637 ot
= dflag
? OT_QUAD
: OT_WORD
;
3639 ot
= dflag
+ OT_WORD
;
3642 val
= insn_get(s
, ot
);
3644 val
= (int8_t)insn_get(s
, OT_BYTE
);
3645 gen_op_movl_T0_im(val
);
3648 case 0x8f: /* pop Ev */
3650 ot
= dflag
? OT_QUAD
: OT_WORD
;
3652 ot
= dflag
+ OT_WORD
;
3654 modrm
= ldub_code(s
->pc
++);
3655 mod
= (modrm
>> 6) & 3;
3658 /* NOTE: order is important for pop %sp */
3660 rm
= (modrm
& 7) | REX_B(s
);
3661 gen_op_mov_reg_T0
[ot
][rm
]();
3663 /* NOTE: order is important too for MMU exceptions */
3664 s
->popl_esp_hack
= 1 << ot
;
3665 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3666 s
->popl_esp_hack
= 0;
3670 case 0xc8: /* enter */
3672 /* XXX: long mode support */
3674 val
= lduw_code(s
->pc
);
3676 level
= ldub_code(s
->pc
++);
3677 gen_enter(s
, val
, level
);
3680 case 0xc9: /* leave */
3681 /* XXX: exception not precise (ESP is updated before potential exception) */
3682 /* XXX: may be invalid for 16 bit in long mode */
3684 gen_op_mov_TN_reg
[OT_QUAD
][0][R_EBP
]();
3685 gen_op_mov_reg_T0
[OT_QUAD
][R_ESP
]();
3686 } else if (s
->ss32
) {
3687 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
3688 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
3690 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
3691 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
3695 ot
= dflag
? OT_QUAD
: OT_WORD
;
3697 ot
= dflag
+ OT_WORD
;
3699 gen_op_mov_reg_T0
[ot
][R_EBP
]();
3702 case 0x06: /* push es */
3703 case 0x0e: /* push cs */
3704 case 0x16: /* push ss */
3705 case 0x1e: /* push ds */
3708 gen_op_movl_T0_seg(b
>> 3);
3711 case 0x1a0: /* push fs */
3712 case 0x1a8: /* push gs */
3713 gen_op_movl_T0_seg((b
>> 3) & 7);
3716 case 0x07: /* pop es */
3717 case 0x17: /* pop ss */
3718 case 0x1f: /* pop ds */
3723 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3726 /* if reg == SS, inhibit interrupts/trace. */
3727 /* If several instructions disable interrupts, only the
3729 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3730 gen_op_set_inhibit_irq();
3734 gen_jmp_im(s
->pc
- s
->cs_base
);
3738 case 0x1a1: /* pop fs */
3739 case 0x1a9: /* pop gs */
3741 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
3744 gen_jmp_im(s
->pc
- s
->cs_base
);
3749 /**************************/
3752 case 0x89: /* mov Gv, Ev */
3756 ot
= dflag
+ OT_WORD
;
3757 modrm
= ldub_code(s
->pc
++);
3758 reg
= ((modrm
>> 3) & 7) | rex_r
;
3760 /* generate a generic store */
3761 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
3764 case 0xc7: /* mov Ev, Iv */
3768 ot
= dflag
+ OT_WORD
;
3769 modrm
= ldub_code(s
->pc
++);
3770 mod
= (modrm
>> 6) & 3;
3772 s
->rip_offset
= insn_const_size(ot
);
3773 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3775 val
= insn_get(s
, ot
);
3776 gen_op_movl_T0_im(val
);
3778 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3780 gen_op_mov_reg_T0
[ot
][(modrm
& 7) | REX_B(s
)]();
3783 case 0x8b: /* mov Ev, Gv */
3787 ot
= OT_WORD
+ dflag
;
3788 modrm
= ldub_code(s
->pc
++);
3789 reg
= ((modrm
>> 3) & 7) | rex_r
;
3791 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3792 gen_op_mov_reg_T0
[ot
][reg
]();
3794 case 0x8e: /* mov seg, Gv */
3795 modrm
= ldub_code(s
->pc
++);
3796 reg
= (modrm
>> 3) & 7;
3797 if (reg
>= 6 || reg
== R_CS
)
3799 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3800 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3802 /* if reg == SS, inhibit interrupts/trace */
3803 /* If several instructions disable interrupts, only the
3805 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3806 gen_op_set_inhibit_irq();
3810 gen_jmp_im(s
->pc
- s
->cs_base
);
3814 case 0x8c: /* mov Gv, seg */
3815 modrm
= ldub_code(s
->pc
++);
3816 reg
= (modrm
>> 3) & 7;
3817 mod
= (modrm
>> 6) & 3;
3820 gen_op_movl_T0_seg(reg
);
3822 ot
= OT_WORD
+ dflag
;
3825 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3828 case 0x1b6: /* movzbS Gv, Eb */
3829 case 0x1b7: /* movzwS Gv, Eb */
3830 case 0x1be: /* movsbS Gv, Eb */
3831 case 0x1bf: /* movswS Gv, Eb */
3834 /* d_ot is the size of destination */
3835 d_ot
= dflag
+ OT_WORD
;
3836 /* ot is the size of source */
3837 ot
= (b
& 1) + OT_BYTE
;
3838 modrm
= ldub_code(s
->pc
++);
3839 reg
= ((modrm
>> 3) & 7) | rex_r
;
3840 mod
= (modrm
>> 6) & 3;
3841 rm
= (modrm
& 7) | REX_B(s
);
3844 gen_op_mov_TN_reg
[ot
][0][rm
]();
3845 switch(ot
| (b
& 8)) {
3847 gen_op_movzbl_T0_T0();
3850 gen_op_movsbl_T0_T0();
3853 gen_op_movzwl_T0_T0();
3857 gen_op_movswl_T0_T0();
3860 gen_op_mov_reg_T0
[d_ot
][reg
]();
3862 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3864 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
3866 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
3868 gen_op_mov_reg_T0
[d_ot
][reg
]();
3873 case 0x8d: /* lea */
3874 ot
= dflag
+ OT_WORD
;
3875 modrm
= ldub_code(s
->pc
++);
3876 mod
= (modrm
>> 6) & 3;
3879 reg
= ((modrm
>> 3) & 7) | rex_r
;
3880 /* we must ensure that no segment is added */
3884 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3886 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
3889 case 0xa0: /* mov EAX, Ov */
3891 case 0xa2: /* mov Ov, EAX */
3894 target_ulong offset_addr
;
3899 ot
= dflag
+ OT_WORD
;
3900 #ifdef TARGET_X86_64
3902 offset_addr
= ldq_code(s
->pc
);
3904 if (offset_addr
== (int32_t)offset_addr
)
3905 gen_op_movq_A0_im(offset_addr
);
3907 gen_op_movq_A0_im64(offset_addr
>> 32, offset_addr
);
3912 offset_addr
= insn_get(s
, OT_LONG
);
3914 offset_addr
= insn_get(s
, OT_WORD
);
3916 gen_op_movl_A0_im(offset_addr
);
3918 gen_add_A0_ds_seg(s
);
3920 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3921 gen_op_mov_reg_T0
[ot
][R_EAX
]();
3923 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
3924 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3928 case 0xd7: /* xlat */
3929 #ifdef TARGET_X86_64
3931 gen_op_movq_A0_reg
[R_EBX
]();
3932 gen_op_addq_A0_AL();
3936 gen_op_movl_A0_reg
[R_EBX
]();
3937 gen_op_addl_A0_AL();
3939 gen_op_andl_A0_ffff();
3941 gen_add_A0_ds_seg(s
);
3942 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
3943 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
3945 case 0xb0 ... 0xb7: /* mov R, Ib */
3946 val
= insn_get(s
, OT_BYTE
);
3947 gen_op_movl_T0_im(val
);
3948 gen_op_mov_reg_T0
[OT_BYTE
][(b
& 7) | REX_B(s
)]();
3950 case 0xb8 ... 0xbf: /* mov R, Iv */
3951 #ifdef TARGET_X86_64
3955 tmp
= ldq_code(s
->pc
);
3957 reg
= (b
& 7) | REX_B(s
);
3958 gen_movtl_T0_im(tmp
);
3959 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
3963 ot
= dflag
? OT_LONG
: OT_WORD
;
3964 val
= insn_get(s
, ot
);
3965 reg
= (b
& 7) | REX_B(s
);
3966 gen_op_movl_T0_im(val
);
3967 gen_op_mov_reg_T0
[ot
][reg
]();
3971 case 0x91 ... 0x97: /* xchg R, EAX */
3972 ot
= dflag
+ OT_WORD
;
3973 reg
= (b
& 7) | REX_B(s
);
3977 case 0x87: /* xchg Ev, Gv */
3981 ot
= dflag
+ OT_WORD
;
3982 modrm
= ldub_code(s
->pc
++);
3983 reg
= ((modrm
>> 3) & 7) | rex_r
;
3984 mod
= (modrm
>> 6) & 3;
3986 rm
= (modrm
& 7) | REX_B(s
);
3988 gen_op_mov_TN_reg
[ot
][0][reg
]();
3989 gen_op_mov_TN_reg
[ot
][1][rm
]();
3990 gen_op_mov_reg_T0
[ot
][rm
]();
3991 gen_op_mov_reg_T1
[ot
][reg
]();
3993 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3994 gen_op_mov_TN_reg
[ot
][0][reg
]();
3995 /* for xchg, lock is implicit */
3996 if (!(prefixes
& PREFIX_LOCK
))
3998 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3999 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4000 if (!(prefixes
& PREFIX_LOCK
))
4002 gen_op_mov_reg_T1
[ot
][reg
]();
4005 case 0xc4: /* les Gv */
4010 case 0xc5: /* lds Gv */
4015 case 0x1b2: /* lss Gv */
4018 case 0x1b4: /* lfs Gv */
4021 case 0x1b5: /* lgs Gv */
4024 ot
= dflag
? OT_LONG
: OT_WORD
;
4025 modrm
= ldub_code(s
->pc
++);
4026 reg
= ((modrm
>> 3) & 7) | rex_r
;
4027 mod
= (modrm
>> 6) & 3;
4030 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4031 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4032 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
4033 /* load the segment first to handle exceptions properly */
4034 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
4035 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4036 /* then put the data */
4037 gen_op_mov_reg_T1
[ot
][reg
]();
4039 gen_jmp_im(s
->pc
- s
->cs_base
);
4044 /************************/
4055 ot
= dflag
+ OT_WORD
;
4057 modrm
= ldub_code(s
->pc
++);
4058 mod
= (modrm
>> 6) & 3;
4059 op
= (modrm
>> 3) & 7;
4065 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4068 opreg
= (modrm
& 7) | REX_B(s
);
4073 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4076 shift
= ldub_code(s
->pc
++);
4078 gen_shifti(s
, op
, ot
, opreg
, shift
);
4093 case 0x1a4: /* shld imm */
4097 case 0x1a5: /* shld cl */
4101 case 0x1ac: /* shrd imm */
4105 case 0x1ad: /* shrd cl */
4109 ot
= dflag
+ OT_WORD
;
4110 modrm
= ldub_code(s
->pc
++);
4111 mod
= (modrm
>> 6) & 3;
4112 rm
= (modrm
& 7) | REX_B(s
);
4113 reg
= ((modrm
>> 3) & 7) | rex_r
;
4116 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4117 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4119 gen_op_mov_TN_reg
[ot
][0][rm
]();
4121 gen_op_mov_TN_reg
[ot
][1][reg
]();
4124 val
= ldub_code(s
->pc
++);
4131 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4133 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4134 if (op
== 0 && ot
!= OT_WORD
)
4135 s
->cc_op
= CC_OP_SHLB
+ ot
;
4137 s
->cc_op
= CC_OP_SARB
+ ot
;
4140 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4141 gen_op_set_cc_op(s
->cc_op
);
4143 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4145 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4146 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4149 gen_op_mov_reg_T0
[ot
][rm
]();
4153 /************************/
4156 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4157 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4158 /* XXX: what to do if illegal op ? */
4159 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4162 modrm
= ldub_code(s
->pc
++);
4163 mod
= (modrm
>> 6) & 3;
4165 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4168 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4170 case 0x00 ... 0x07: /* fxxxs */
4171 case 0x10 ... 0x17: /* fixxxl */
4172 case 0x20 ... 0x27: /* fxxxl */
4173 case 0x30 ... 0x37: /* fixxx */
4180 gen_op_flds_FT0_A0();
4183 gen_op_fildl_FT0_A0();
4186 gen_op_fldl_FT0_A0();
4190 gen_op_fild_FT0_A0();
4194 gen_op_fp_arith_ST0_FT0
[op1
]();
4196 /* fcomp needs pop */
4201 case 0x08: /* flds */
4202 case 0x0a: /* fsts */
4203 case 0x0b: /* fstps */
4204 case 0x18: /* fildl */
4205 case 0x1a: /* fistl */
4206 case 0x1b: /* fistpl */
4207 case 0x28: /* fldl */
4208 case 0x2a: /* fstl */
4209 case 0x2b: /* fstpl */
4210 case 0x38: /* filds */
4211 case 0x3a: /* fists */
4212 case 0x3b: /* fistps */
4218 gen_op_flds_ST0_A0();
4221 gen_op_fildl_ST0_A0();
4224 gen_op_fldl_ST0_A0();
4228 gen_op_fild_ST0_A0();
4235 gen_op_fsts_ST0_A0();
4238 gen_op_fistl_ST0_A0();
4241 gen_op_fstl_ST0_A0();
4245 gen_op_fist_ST0_A0();
4253 case 0x0c: /* fldenv mem */
4254 gen_op_fldenv_A0(s
->dflag
);
4256 case 0x0d: /* fldcw mem */
4259 case 0x0e: /* fnstenv mem */
4260 gen_op_fnstenv_A0(s
->dflag
);
4262 case 0x0f: /* fnstcw mem */
4265 case 0x1d: /* fldt mem */
4266 gen_op_fldt_ST0_A0();
4268 case 0x1f: /* fstpt mem */
4269 gen_op_fstt_ST0_A0();
4272 case 0x2c: /* frstor mem */
4273 gen_op_frstor_A0(s
->dflag
);
4275 case 0x2e: /* fnsave mem */
4276 gen_op_fnsave_A0(s
->dflag
);
4278 case 0x2f: /* fnstsw mem */
4281 case 0x3c: /* fbld */
4282 gen_op_fbld_ST0_A0();
4284 case 0x3e: /* fbstp */
4285 gen_op_fbst_ST0_A0();
4288 case 0x3d: /* fildll */
4289 gen_op_fildll_ST0_A0();
4291 case 0x3f: /* fistpll */
4292 gen_op_fistll_ST0_A0();
4299 /* register float ops */
4303 case 0x08: /* fld sti */
4305 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4307 case 0x09: /* fxchg sti */
4308 case 0x29: /* fxchg4 sti, undocumented op */
4309 case 0x39: /* fxchg7 sti, undocumented op */
4310 gen_op_fxchg_ST0_STN(opreg
);
4312 case 0x0a: /* grp d9/2 */
4315 /* check exceptions (FreeBSD FPU probe) */
4316 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4317 gen_op_set_cc_op(s
->cc_op
);
4318 gen_jmp_im(pc_start
- s
->cs_base
);
4325 case 0x0c: /* grp d9/4 */
4335 gen_op_fcom_ST0_FT0();
4344 case 0x0d: /* grp d9/5 */
4353 gen_op_fldl2t_ST0();
4357 gen_op_fldl2e_ST0();
4365 gen_op_fldlg2_ST0();
4369 gen_op_fldln2_ST0();
4380 case 0x0e: /* grp d9/6 */
4391 case 3: /* fpatan */
4394 case 4: /* fxtract */
4397 case 5: /* fprem1 */
4400 case 6: /* fdecstp */
4404 case 7: /* fincstp */
4409 case 0x0f: /* grp d9/7 */
4414 case 1: /* fyl2xp1 */
4420 case 3: /* fsincos */
4423 case 5: /* fscale */
4426 case 4: /* frndint */
4438 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4439 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4440 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4446 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4450 gen_op_fmov_FT0_STN(opreg
);
4451 gen_op_fp_arith_ST0_FT0
[op1
]();
4455 case 0x02: /* fcom */
4456 case 0x22: /* fcom2, undocumented op */
4457 gen_op_fmov_FT0_STN(opreg
);
4458 gen_op_fcom_ST0_FT0();
4460 case 0x03: /* fcomp */
4461 case 0x23: /* fcomp3, undocumented op */
4462 case 0x32: /* fcomp5, undocumented op */
4463 gen_op_fmov_FT0_STN(opreg
);
4464 gen_op_fcom_ST0_FT0();
4467 case 0x15: /* da/5 */
4469 case 1: /* fucompp */
4470 gen_op_fmov_FT0_STN(1);
4471 gen_op_fucom_ST0_FT0();
4481 case 0: /* feni (287 only, just do nop here) */
4483 case 1: /* fdisi (287 only, just do nop here) */
4488 case 3: /* fninit */
4491 case 4: /* fsetpm (287 only, just do nop here) */
4497 case 0x1d: /* fucomi */
4498 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4499 gen_op_set_cc_op(s
->cc_op
);
4500 gen_op_fmov_FT0_STN(opreg
);
4501 gen_op_fucomi_ST0_FT0();
4502 s
->cc_op
= CC_OP_EFLAGS
;
4504 case 0x1e: /* fcomi */
4505 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4506 gen_op_set_cc_op(s
->cc_op
);
4507 gen_op_fmov_FT0_STN(opreg
);
4508 gen_op_fcomi_ST0_FT0();
4509 s
->cc_op
= CC_OP_EFLAGS
;
4511 case 0x28: /* ffree sti */
4512 gen_op_ffree_STN(opreg
);
4514 case 0x2a: /* fst sti */
4515 gen_op_fmov_STN_ST0(opreg
);
4517 case 0x2b: /* fstp sti */
4518 case 0x0b: /* fstp1 sti, undocumented op */
4519 case 0x3a: /* fstp8 sti, undocumented op */
4520 case 0x3b: /* fstp9 sti, undocumented op */
4521 gen_op_fmov_STN_ST0(opreg
);
4524 case 0x2c: /* fucom st(i) */
4525 gen_op_fmov_FT0_STN(opreg
);
4526 gen_op_fucom_ST0_FT0();
4528 case 0x2d: /* fucomp st(i) */
4529 gen_op_fmov_FT0_STN(opreg
);
4530 gen_op_fucom_ST0_FT0();
4533 case 0x33: /* de/3 */
4535 case 1: /* fcompp */
4536 gen_op_fmov_FT0_STN(1);
4537 gen_op_fcom_ST0_FT0();
4545 case 0x38: /* ffreep sti, undocumented op */
4546 gen_op_ffree_STN(opreg
);
4549 case 0x3c: /* df/4 */
4552 gen_op_fnstsw_EAX();
4558 case 0x3d: /* fucomip */
4559 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4560 gen_op_set_cc_op(s
->cc_op
);
4561 gen_op_fmov_FT0_STN(opreg
);
4562 gen_op_fucomi_ST0_FT0();
4564 s
->cc_op
= CC_OP_EFLAGS
;
4566 case 0x3e: /* fcomip */
4567 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4568 gen_op_set_cc_op(s
->cc_op
);
4569 gen_op_fmov_FT0_STN(opreg
);
4570 gen_op_fcomi_ST0_FT0();
4572 s
->cc_op
= CC_OP_EFLAGS
;
4574 case 0x10 ... 0x13: /* fcmovxx */
4578 const static uint8_t fcmov_cc
[8] = {
4584 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4586 gen_op_fcmov_ST0_STN_T0(opreg
);
4593 #ifdef USE_CODE_COPY
4594 s
->tb
->cflags
|= CF_TB_FP_USED
;
4597 /************************/
4600 case 0xa4: /* movsS */
4605 ot
= dflag
+ OT_WORD
;
4607 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4608 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4614 case 0xaa: /* stosS */
4619 ot
= dflag
+ OT_WORD
;
4621 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4622 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4627 case 0xac: /* lodsS */
4632 ot
= dflag
+ OT_WORD
;
4633 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4634 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4639 case 0xae: /* scasS */
4644 ot
= dflag
+ OT_WORD
;
4645 if (prefixes
& PREFIX_REPNZ
) {
4646 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4647 } else if (prefixes
& PREFIX_REPZ
) {
4648 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4651 s
->cc_op
= CC_OP_SUBB
+ ot
;
4655 case 0xa6: /* cmpsS */
4660 ot
= dflag
+ OT_WORD
;
4661 if (prefixes
& PREFIX_REPNZ
) {
4662 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4663 } else if (prefixes
& PREFIX_REPZ
) {
4664 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4667 s
->cc_op
= CC_OP_SUBB
+ ot
;
4670 case 0x6c: /* insS */
4675 ot
= dflag
? OT_LONG
: OT_WORD
;
4676 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4677 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4678 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4683 case 0x6e: /* outsS */
4688 ot
= dflag
? OT_LONG
: OT_WORD
;
4689 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4690 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4691 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4697 /************************/
4704 ot
= dflag
? OT_LONG
: OT_WORD
;
4705 val
= ldub_code(s
->pc
++);
4706 gen_op_movl_T0_im(val
);
4707 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4709 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4716 ot
= dflag
? OT_LONG
: OT_WORD
;
4717 val
= ldub_code(s
->pc
++);
4718 gen_op_movl_T0_im(val
);
4719 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4720 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4728 ot
= dflag
? OT_LONG
: OT_WORD
;
4729 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4730 gen_op_andl_T0_ffff();
4731 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4733 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4740 ot
= dflag
? OT_LONG
: OT_WORD
;
4741 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4742 gen_op_andl_T0_ffff();
4743 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4744 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4748 /************************/
4750 case 0xc2: /* ret im */
4751 val
= ldsw_code(s
->pc
);
4754 gen_stack_update(s
, val
+ (2 << s
->dflag
));
4756 gen_op_andl_T0_ffff();
4760 case 0xc3: /* ret */
4764 gen_op_andl_T0_ffff();
4768 case 0xca: /* lret im */
4769 val
= ldsw_code(s
->pc
);
4772 if (s
->pe
&& !s
->vm86
) {
4773 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4774 gen_op_set_cc_op(s
->cc_op
);
4775 gen_jmp_im(pc_start
- s
->cs_base
);
4776 gen_op_lret_protected(s
->dflag
, val
);
4780 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4782 gen_op_andl_T0_ffff();
4783 /* NOTE: keeping EIP updated is not a problem in case of
4787 gen_op_addl_A0_im(2 << s
->dflag
);
4788 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4789 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
4790 /* add stack offset */
4791 gen_stack_update(s
, val
+ (4 << s
->dflag
));
4795 case 0xcb: /* lret */
4798 case 0xcf: /* iret */
4801 gen_op_iret_real(s
->dflag
);
4802 s
->cc_op
= CC_OP_EFLAGS
;
4803 } else if (s
->vm86
) {
4805 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4807 gen_op_iret_real(s
->dflag
);
4808 s
->cc_op
= CC_OP_EFLAGS
;
4811 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4812 gen_op_set_cc_op(s
->cc_op
);
4813 gen_jmp_im(pc_start
- s
->cs_base
);
4814 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
4815 s
->cc_op
= CC_OP_EFLAGS
;
4819 case 0xe8: /* call im */
4822 tval
= (int32_t)insn_get(s
, OT_LONG
);
4824 tval
= (int16_t)insn_get(s
, OT_WORD
);
4825 next_eip
= s
->pc
- s
->cs_base
;
4829 gen_movtl_T0_im(next_eip
);
4834 case 0x9a: /* lcall im */
4836 unsigned int selector
, offset
;
4840 ot
= dflag
? OT_LONG
: OT_WORD
;
4841 offset
= insn_get(s
, ot
);
4842 selector
= insn_get(s
, OT_WORD
);
4844 gen_op_movl_T0_im(selector
);
4845 gen_op_movl_T1_imu(offset
);
4848 case 0xe9: /* jmp */
4850 tval
= (int32_t)insn_get(s
, OT_LONG
);
4852 tval
= (int16_t)insn_get(s
, OT_WORD
);
4853 tval
+= s
->pc
- s
->cs_base
;
4858 case 0xea: /* ljmp im */
4860 unsigned int selector
, offset
;
4864 ot
= dflag
? OT_LONG
: OT_WORD
;
4865 offset
= insn_get(s
, ot
);
4866 selector
= insn_get(s
, OT_WORD
);
4868 gen_op_movl_T0_im(selector
);
4869 gen_op_movl_T1_imu(offset
);
4872 case 0xeb: /* jmp Jb */
4873 tval
= (int8_t)insn_get(s
, OT_BYTE
);
4874 tval
+= s
->pc
- s
->cs_base
;
4879 case 0x70 ... 0x7f: /* jcc Jb */
4880 tval
= (int8_t)insn_get(s
, OT_BYTE
);
4882 case 0x180 ... 0x18f: /* jcc Jv */
4884 tval
= (int32_t)insn_get(s
, OT_LONG
);
4886 tval
= (int16_t)insn_get(s
, OT_WORD
);
4889 next_eip
= s
->pc
- s
->cs_base
;
4893 gen_jcc(s
, b
, tval
, next_eip
);
4896 case 0x190 ... 0x19f: /* setcc Gv */
4897 modrm
= ldub_code(s
->pc
++);
4899 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
4901 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4902 ot
= dflag
+ OT_WORD
;
4903 modrm
= ldub_code(s
->pc
++);
4904 reg
= ((modrm
>> 3) & 7) | rex_r
;
4905 mod
= (modrm
>> 6) & 3;
4908 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4909 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4911 rm
= (modrm
& 7) | REX_B(s
);
4912 gen_op_mov_TN_reg
[ot
][1][rm
]();
4914 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
4917 /************************/
4919 case 0x9c: /* pushf */
4920 if (s
->vm86
&& s
->iopl
!= 3) {
4921 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4923 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4924 gen_op_set_cc_op(s
->cc_op
);
4925 gen_op_movl_T0_eflags();
4929 case 0x9d: /* popf */
4930 if (s
->vm86
&& s
->iopl
!= 3) {
4931 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4936 gen_op_movl_eflags_T0_cpl0();
4938 gen_op_movw_eflags_T0_cpl0();
4941 if (s
->cpl
<= s
->iopl
) {
4943 gen_op_movl_eflags_T0_io();
4945 gen_op_movw_eflags_T0_io();
4949 gen_op_movl_eflags_T0();
4951 gen_op_movw_eflags_T0();
4956 s
->cc_op
= CC_OP_EFLAGS
;
4957 /* abort translation because TF flag may change */
4958 gen_jmp_im(s
->pc
- s
->cs_base
);
4962 case 0x9e: /* sahf */
4965 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
4966 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4967 gen_op_set_cc_op(s
->cc_op
);
4968 gen_op_movb_eflags_T0();
4969 s
->cc_op
= CC_OP_EFLAGS
;
4971 case 0x9f: /* lahf */
4974 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4975 gen_op_set_cc_op(s
->cc_op
);
4976 gen_op_movl_T0_eflags();
4977 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
4979 case 0xf5: /* cmc */
4980 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4981 gen_op_set_cc_op(s
->cc_op
);
4983 s
->cc_op
= CC_OP_EFLAGS
;
4985 case 0xf8: /* clc */
4986 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4987 gen_op_set_cc_op(s
->cc_op
);
4989 s
->cc_op
= CC_OP_EFLAGS
;
4991 case 0xf9: /* stc */
4992 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4993 gen_op_set_cc_op(s
->cc_op
);
4995 s
->cc_op
= CC_OP_EFLAGS
;
4997 case 0xfc: /* cld */
5000 case 0xfd: /* std */
5004 /************************/
5005 /* bit operations */
5006 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5007 ot
= dflag
+ OT_WORD
;
5008 modrm
= ldub_code(s
->pc
++);
5009 op
= ((modrm
>> 3) & 7) | rex_r
;
5010 mod
= (modrm
>> 6) & 3;
5011 rm
= (modrm
& 7) | REX_B(s
);
5014 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5015 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5017 gen_op_mov_TN_reg
[ot
][0][rm
]();
5020 val
= ldub_code(s
->pc
++);
5021 gen_op_movl_T1_im(val
);
5025 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5026 s
->cc_op
= CC_OP_SARB
+ ot
;
5029 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5031 gen_op_mov_reg_T0
[ot
][rm
]();
5032 gen_op_update_bt_cc();
5035 case 0x1a3: /* bt Gv, Ev */
5038 case 0x1ab: /* bts */
5041 case 0x1b3: /* btr */
5044 case 0x1bb: /* btc */
5047 ot
= dflag
+ OT_WORD
;
5048 modrm
= ldub_code(s
->pc
++);
5049 reg
= ((modrm
>> 3) & 7) | rex_r
;
5050 mod
= (modrm
>> 6) & 3;
5051 rm
= (modrm
& 7) | REX_B(s
);
5052 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
5054 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5055 /* specific case: we need to add a displacement */
5056 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5057 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5059 gen_op_mov_TN_reg
[ot
][0][rm
]();
5061 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5062 s
->cc_op
= CC_OP_SARB
+ ot
;
5065 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5067 gen_op_mov_reg_T0
[ot
][rm
]();
5068 gen_op_update_bt_cc();
5071 case 0x1bc: /* bsf */
5072 case 0x1bd: /* bsr */
5073 ot
= dflag
+ OT_WORD
;
5074 modrm
= ldub_code(s
->pc
++);
5075 reg
= ((modrm
>> 3) & 7) | rex_r
;
5076 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5077 /* NOTE: in order to handle the 0 case, we must load the
5078 result. It could be optimized with a generated jump */
5079 gen_op_mov_TN_reg
[ot
][1][reg
]();
5080 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5081 gen_op_mov_reg_T1
[ot
][reg
]();
5082 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5084 /************************/
5086 case 0x27: /* daa */
5089 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5090 gen_op_set_cc_op(s
->cc_op
);
5092 s
->cc_op
= CC_OP_EFLAGS
;
5094 case 0x2f: /* das */
5097 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5098 gen_op_set_cc_op(s
->cc_op
);
5100 s
->cc_op
= CC_OP_EFLAGS
;
5102 case 0x37: /* aaa */
5105 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5106 gen_op_set_cc_op(s
->cc_op
);
5108 s
->cc_op
= CC_OP_EFLAGS
;
5110 case 0x3f: /* aas */
5113 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5114 gen_op_set_cc_op(s
->cc_op
);
5116 s
->cc_op
= CC_OP_EFLAGS
;
5118 case 0xd4: /* aam */
5121 val
= ldub_code(s
->pc
++);
5123 s
->cc_op
= CC_OP_LOGICB
;
5125 case 0xd5: /* aad */
5128 val
= ldub_code(s
->pc
++);
5130 s
->cc_op
= CC_OP_LOGICB
;
5132 /************************/
5134 case 0x90: /* nop */
5135 /* XXX: xchg + rex handling */
5136 /* XXX: correct lock test for all insn */
5137 if (prefixes
& PREFIX_LOCK
)
5140 case 0x9b: /* fwait */
5141 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5142 (HF_MP_MASK
| HF_TS_MASK
)) {
5143 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5145 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5146 gen_op_set_cc_op(s
->cc_op
);
5147 gen_jmp_im(pc_start
- s
->cs_base
);
5151 case 0xcc: /* int3 */
5152 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5154 case 0xcd: /* int N */
5155 val
= ldub_code(s
->pc
++);
5156 if (s
->vm86
&& s
->iopl
!= 3) {
5157 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5159 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5162 case 0xce: /* into */
5165 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5166 gen_op_set_cc_op(s
->cc_op
);
5167 gen_jmp_im(pc_start
- s
->cs_base
);
5168 gen_op_into(s
->pc
- pc_start
);
5170 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5171 gen_debug(s
, pc_start
- s
->cs_base
);
5173 case 0xfa: /* cli */
5175 if (s
->cpl
<= s
->iopl
) {
5178 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5184 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5188 case 0xfb: /* sti */
5190 if (s
->cpl
<= s
->iopl
) {
5193 /* interruptions are enabled only the first insn after sti */
5194 /* If several instructions disable interrupts, only the
5196 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5197 gen_op_set_inhibit_irq();
5198 /* give a chance to handle pending irqs */
5199 gen_jmp_im(s
->pc
- s
->cs_base
);
5202 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5208 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5212 case 0x62: /* bound */
5215 ot
= dflag
? OT_LONG
: OT_WORD
;
5216 modrm
= ldub_code(s
->pc
++);
5217 reg
= (modrm
>> 3) & 7;
5218 mod
= (modrm
>> 6) & 3;
5221 gen_op_mov_TN_reg
[ot
][0][reg
]();
5222 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5223 gen_jmp_im(pc_start
- s
->cs_base
);
5229 case 0x1c8 ... 0x1cf: /* bswap reg */
5230 reg
= (b
& 7) | REX_B(s
);
5231 #ifdef TARGET_X86_64
5233 gen_op_mov_TN_reg
[OT_QUAD
][0][reg
]();
5235 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
5239 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
5241 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
5244 case 0xd6: /* salc */
5247 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5248 gen_op_set_cc_op(s
->cc_op
);
5251 case 0xe0: /* loopnz */
5252 case 0xe1: /* loopz */
5253 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5254 gen_op_set_cc_op(s
->cc_op
);
5256 case 0xe2: /* loop */
5257 case 0xe3: /* jecxz */
5261 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5262 next_eip
= s
->pc
- s
->cs_base
;
5267 l1
= gen_new_label();
5268 l2
= gen_new_label();
5271 gen_op_jz_ecx
[s
->aflag
](l1
);
5273 gen_op_dec_ECX
[s
->aflag
]();
5276 gen_op_loop
[s
->aflag
][b
](l1
);
5279 gen_jmp_im(next_eip
);
5280 gen_op_jmp_label(l2
);
5287 case 0x130: /* wrmsr */
5288 case 0x132: /* rdmsr */
5290 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5298 case 0x131: /* rdtsc */
5301 case 0x134: /* sysenter */
5305 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5307 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5308 gen_op_set_cc_op(s
->cc_op
);
5309 s
->cc_op
= CC_OP_DYNAMIC
;
5311 gen_jmp_im(pc_start
- s
->cs_base
);
5316 case 0x135: /* sysexit */
5320 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5322 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5323 gen_op_set_cc_op(s
->cc_op
);
5324 s
->cc_op
= CC_OP_DYNAMIC
;
5326 gen_jmp_im(pc_start
- s
->cs_base
);
5331 #ifdef TARGET_X86_64
5332 case 0x105: /* syscall */
5333 /* XXX: is it usable in real mode ? */
5334 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5335 gen_op_set_cc_op(s
->cc_op
);
5336 s
->cc_op
= CC_OP_DYNAMIC
;
5338 gen_jmp_im(pc_start
- s
->cs_base
);
5339 gen_op_syscall(s
->pc
- pc_start
);
5342 case 0x107: /* sysret */
5344 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5346 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5347 gen_op_set_cc_op(s
->cc_op
);
5348 s
->cc_op
= CC_OP_DYNAMIC
;
5350 gen_jmp_im(pc_start
- s
->cs_base
);
5351 gen_op_sysret(s
->dflag
);
5356 case 0x1a2: /* cpuid */
5359 case 0xf4: /* hlt */
5361 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5363 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5364 gen_op_set_cc_op(s
->cc_op
);
5365 gen_jmp_im(s
->pc
- s
->cs_base
);
5371 modrm
= ldub_code(s
->pc
++);
5372 mod
= (modrm
>> 6) & 3;
5373 op
= (modrm
>> 3) & 7;
5376 if (!s
->pe
|| s
->vm86
)
5378 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5382 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5385 if (!s
->pe
|| s
->vm86
)
5388 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5390 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5391 gen_jmp_im(pc_start
- s
->cs_base
);
5396 if (!s
->pe
|| s
->vm86
)
5398 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5402 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5405 if (!s
->pe
|| s
->vm86
)
5408 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5410 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5411 gen_jmp_im(pc_start
- s
->cs_base
);
5417 if (!s
->pe
|| s
->vm86
)
5419 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5420 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5421 gen_op_set_cc_op(s
->cc_op
);
5426 s
->cc_op
= CC_OP_EFLAGS
;
5433 modrm
= ldub_code(s
->pc
++);
5434 mod
= (modrm
>> 6) & 3;
5435 op
= (modrm
>> 3) & 7;
5441 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5443 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.limit
));
5445 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.limit
));
5446 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
5447 #ifdef TARGET_X86_64
5449 gen_op_addq_A0_im(2);
5452 gen_op_addl_A0_im(2);
5454 gen_op_movtl_T0_env(offsetof(CPUX86State
,gdt
.base
));
5456 gen_op_movtl_T0_env(offsetof(CPUX86State
,idt
.base
));
5458 gen_op_andl_T0_im(0xffffff);
5459 gen_op_st_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5466 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5468 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5469 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
5470 #ifdef TARGET_X86_64
5472 gen_op_addq_A0_im(2);
5475 gen_op_addl_A0_im(2);
5476 gen_op_ld_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5478 gen_op_andl_T0_im(0xffffff);
5480 gen_op_movtl_env_T0(offsetof(CPUX86State
,gdt
.base
));
5481 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
5483 gen_op_movtl_env_T0(offsetof(CPUX86State
,idt
.base
));
5484 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
5489 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
5490 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
5494 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5496 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5498 gen_jmp_im(s
->pc
- s
->cs_base
);
5502 case 7: /* invlpg */
5504 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5507 #ifdef TARGET_X86_64
5508 if (CODE64(s
) && (modrm
& 7) == 0) {
5510 gen_op_movtl_T0_env(offsetof(CPUX86State
,segs
[R_GS
].base
));
5511 gen_op_movtl_T1_env(offsetof(CPUX86State
,kernelgsbase
));
5512 gen_op_movtl_env_T1(offsetof(CPUX86State
,segs
[R_GS
].base
));
5513 gen_op_movtl_env_T0(offsetof(CPUX86State
,kernelgsbase
));
5520 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5522 gen_jmp_im(s
->pc
- s
->cs_base
);
5531 case 0x108: /* invd */
5532 case 0x109: /* wbinvd */
5534 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5539 case 0x63: /* arpl or movslS (x86_64) */
5540 #ifdef TARGET_X86_64
5543 /* d_ot is the size of destination */
5544 d_ot
= dflag
+ OT_WORD
;
5546 modrm
= ldub_code(s
->pc
++);
5547 reg
= ((modrm
>> 3) & 7) | rex_r
;
5548 mod
= (modrm
>> 6) & 3;
5549 rm
= (modrm
& 7) | REX_B(s
);
5552 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
5554 if (d_ot
== OT_QUAD
)
5555 gen_op_movslq_T0_T0();
5556 gen_op_mov_reg_T0
[d_ot
][reg
]();
5558 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5559 if (d_ot
== OT_QUAD
) {
5560 gen_op_lds_T0_A0
[OT_LONG
+ s
->mem_index
]();
5562 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5564 gen_op_mov_reg_T0
[d_ot
][reg
]();
5569 if (!s
->pe
|| s
->vm86
)
5571 ot
= dflag
? OT_LONG
: OT_WORD
;
5572 modrm
= ldub_code(s
->pc
++);
5573 reg
= (modrm
>> 3) & 7;
5574 mod
= (modrm
>> 6) & 3;
5577 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5578 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5580 gen_op_mov_TN_reg
[ot
][0][rm
]();
5582 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5583 gen_op_set_cc_op(s
->cc_op
);
5585 s
->cc_op
= CC_OP_EFLAGS
;
5587 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5589 gen_op_mov_reg_T0
[ot
][rm
]();
5591 gen_op_arpl_update();
5594 case 0x102: /* lar */
5595 case 0x103: /* lsl */
5596 if (!s
->pe
|| s
->vm86
)
5598 ot
= dflag
? OT_LONG
: OT_WORD
;
5599 modrm
= ldub_code(s
->pc
++);
5600 reg
= ((modrm
>> 3) & 7) | rex_r
;
5601 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5602 gen_op_mov_TN_reg
[ot
][1][reg
]();
5603 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5604 gen_op_set_cc_op(s
->cc_op
);
5609 s
->cc_op
= CC_OP_EFLAGS
;
5610 gen_op_mov_reg_T1
[ot
][reg
]();
5613 modrm
= ldub_code(s
->pc
++);
5614 mod
= (modrm
>> 6) & 3;
5615 op
= (modrm
>> 3) & 7;
5617 case 0: /* prefetchnta */
5618 case 1: /* prefetchnt0 */
5619 case 2: /* prefetchnt0 */
5620 case 3: /* prefetchnt0 */
5623 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5624 /* nothing more to do */
5630 case 0x120: /* mov reg, crN */
5631 case 0x122: /* mov crN, reg */
5633 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5635 modrm
= ldub_code(s
->pc
++);
5636 if ((modrm
& 0xc0) != 0xc0)
5638 rm
= (modrm
& 7) | REX_B(s
);
5639 reg
= ((modrm
>> 3) & 7) | rex_r
;
5651 gen_op_mov_TN_reg
[ot
][0][rm
]();
5652 gen_op_movl_crN_T0(reg
);
5653 gen_jmp_im(s
->pc
- s
->cs_base
);
5656 #if !defined(CONFIG_USER_ONLY)
5658 gen_op_movtl_T0_cr8();
5661 gen_op_movtl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
5662 gen_op_mov_reg_T0
[ot
][rm
]();
5670 case 0x121: /* mov reg, drN */
5671 case 0x123: /* mov drN, reg */
5673 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5675 modrm
= ldub_code(s
->pc
++);
5676 if ((modrm
& 0xc0) != 0xc0)
5678 rm
= (modrm
& 7) | REX_B(s
);
5679 reg
= ((modrm
>> 3) & 7) | rex_r
;
5684 /* XXX: do it dynamically with CR4.DE bit */
5685 if (reg
== 4 || reg
== 5 || reg
>= 8)
5688 gen_op_mov_TN_reg
[ot
][0][rm
]();
5689 gen_op_movl_drN_T0(reg
);
5690 gen_jmp_im(s
->pc
- s
->cs_base
);
5693 gen_op_movtl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
5694 gen_op_mov_reg_T0
[ot
][rm
]();
5698 case 0x106: /* clts */
5700 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5703 /* abort block because static cpu state changed */
5704 gen_jmp_im(s
->pc
- s
->cs_base
);
5708 /* MMX/SSE/SSE2/PNI support */
5709 case 0x1c3: /* MOVNTI reg, mem */
5710 if (!(s
->cpuid_features
& CPUID_SSE2
))
5712 ot
= s
->dflag
== 2 ? OT_QUAD
: OT_LONG
;
5713 modrm
= ldub_code(s
->pc
++);
5714 mod
= (modrm
>> 6) & 3;
5717 reg
= ((modrm
>> 3) & 7) | rex_r
;
5718 /* generate a generic store */
5719 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
5722 modrm
= ldub_code(s
->pc
++);
5723 mod
= (modrm
>> 6) & 3;
5724 op
= (modrm
>> 3) & 7;
5726 case 0: /* fxsave */
5727 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
))
5729 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5730 gen_op_fxsave_A0((s
->dflag
== 2));
5732 case 1: /* fxrstor */
5733 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
))
5735 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5736 gen_op_fxrstor_A0((s
->dflag
== 2));
5738 case 2: /* ldmxcsr */
5739 case 3: /* stmxcsr */
5740 if (s
->flags
& HF_TS_MASK
) {
5741 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5744 if ((s
->flags
& HF_EM_MASK
) || !(s
->flags
& HF_OSFXSR_MASK
) ||
5747 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5749 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5750 gen_op_movl_env_T0(offsetof(CPUX86State
, mxcsr
));
5752 gen_op_movl_T0_env(offsetof(CPUX86State
, mxcsr
));
5753 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
5756 case 5: /* lfence */
5757 case 6: /* mfence */
5758 case 7: /* sfence */
5759 if ((modrm
& 0xc7) != 0xc0 || !(s
->cpuid_features
& CPUID_SSE
))
5766 case 0x110 ... 0x117:
5767 case 0x128 ... 0x12f:
5768 case 0x150 ... 0x177:
5769 case 0x17c ... 0x17f:
5771 case 0x1c4 ... 0x1c6:
5772 case 0x1d0 ... 0x1fe:
5773 gen_sse(s
, b
, pc_start
, rex_r
);
5778 /* lock generation */
5779 if (s
->prefix
& PREFIX_LOCK
)
5783 if (s
->prefix
& PREFIX_LOCK
)
5785 /* XXX: ensure that no lock was generated */
5786 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
5790 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
5791 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
5793 /* flags read by an operation */
5794 static uint16_t opc_read_flags
[NB_OPS
] = {
5795 [INDEX_op_aas
] = CC_A
,
5796 [INDEX_op_aaa
] = CC_A
,
5797 [INDEX_op_das
] = CC_A
| CC_C
,
5798 [INDEX_op_daa
] = CC_A
| CC_C
,
5800 /* subtle: due to the incl/decl implementation, C is used */
5801 [INDEX_op_update_inc_cc
] = CC_C
,
5803 [INDEX_op_into
] = CC_O
,
5805 [INDEX_op_jb_subb
] = CC_C
,
5806 [INDEX_op_jb_subw
] = CC_C
,
5807 [INDEX_op_jb_subl
] = CC_C
,
5809 [INDEX_op_jz_subb
] = CC_Z
,
5810 [INDEX_op_jz_subw
] = CC_Z
,
5811 [INDEX_op_jz_subl
] = CC_Z
,
5813 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
5814 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
5815 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
5817 [INDEX_op_js_subb
] = CC_S
,
5818 [INDEX_op_js_subw
] = CC_S
,
5819 [INDEX_op_js_subl
] = CC_S
,
5821 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
5822 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
5823 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
5825 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
5826 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
5827 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
5829 [INDEX_op_loopnzw
] = CC_Z
,
5830 [INDEX_op_loopnzl
] = CC_Z
,
5831 [INDEX_op_loopzw
] = CC_Z
,
5832 [INDEX_op_loopzl
] = CC_Z
,
5834 [INDEX_op_seto_T0_cc
] = CC_O
,
5835 [INDEX_op_setb_T0_cc
] = CC_C
,
5836 [INDEX_op_setz_T0_cc
] = CC_Z
,
5837 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
5838 [INDEX_op_sets_T0_cc
] = CC_S
,
5839 [INDEX_op_setp_T0_cc
] = CC_P
,
5840 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
5841 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
5843 [INDEX_op_setb_T0_subb
] = CC_C
,
5844 [INDEX_op_setb_T0_subw
] = CC_C
,
5845 [INDEX_op_setb_T0_subl
] = CC_C
,
5847 [INDEX_op_setz_T0_subb
] = CC_Z
,
5848 [INDEX_op_setz_T0_subw
] = CC_Z
,
5849 [INDEX_op_setz_T0_subl
] = CC_Z
,
5851 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
5852 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
5853 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
5855 [INDEX_op_sets_T0_subb
] = CC_S
,
5856 [INDEX_op_sets_T0_subw
] = CC_S
,
5857 [INDEX_op_sets_T0_subl
] = CC_S
,
5859 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
5860 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
5861 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
5863 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
5864 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
5865 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
5867 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
5868 [INDEX_op_cmc
] = CC_C
,
5869 [INDEX_op_salc
] = CC_C
,
5871 /* needed for correct flag optimisation before string ops */
5872 [INDEX_op_jnz_ecxw
] = CC_OSZAPC
,
5873 [INDEX_op_jnz_ecxl
] = CC_OSZAPC
,
5874 [INDEX_op_jz_ecxw
] = CC_OSZAPC
,
5875 [INDEX_op_jz_ecxl
] = CC_OSZAPC
,
5877 #ifdef TARGET_X86_64
5878 [INDEX_op_jb_subq
] = CC_C
,
5879 [INDEX_op_jz_subq
] = CC_Z
,
5880 [INDEX_op_jbe_subq
] = CC_Z
| CC_C
,
5881 [INDEX_op_js_subq
] = CC_S
,
5882 [INDEX_op_jl_subq
] = CC_O
| CC_S
,
5883 [INDEX_op_jle_subq
] = CC_O
| CC_S
| CC_Z
,
5885 [INDEX_op_loopnzq
] = CC_Z
,
5886 [INDEX_op_loopzq
] = CC_Z
,
5888 [INDEX_op_setb_T0_subq
] = CC_C
,
5889 [INDEX_op_setz_T0_subq
] = CC_Z
,
5890 [INDEX_op_setbe_T0_subq
] = CC_Z
| CC_C
,
5891 [INDEX_op_sets_T0_subq
] = CC_S
,
5892 [INDEX_op_setl_T0_subq
] = CC_O
| CC_S
,
5893 [INDEX_op_setle_T0_subq
] = CC_O
| CC_S
| CC_Z
,
5895 [INDEX_op_jnz_ecxq
] = CC_OSZAPC
,
5896 [INDEX_op_jz_ecxq
] = CC_OSZAPC
,
/* Table fragment for opc_read_flags[]: ops that *consume* the carry flag.
   adc/sbb add/subtract with borrow, and rcl/rcr rotate through CF, so all
   of them read CC_C before computing their result.  SUFFIX selects the
   memory-access variant (e.g. empty, _raw, _kernel, _user) of each op. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
5920 #ifndef CONFIG_USER_ONLY
5926 /* flags written by an operation */
5927 static uint16_t opc_write_flags
[NB_OPS
] = {
5928 [INDEX_op_update2_cc
] = CC_OSZAPC
,
5929 [INDEX_op_update1_cc
] = CC_OSZAPC
,
5930 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
5931 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
5932 /* subtle: due to the incl/decl implementation, C is used */
5933 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
5934 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
5936 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
5937 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
5938 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
5939 X86_64_DEF([INDEX_op_mulq_EAX_T0
] = CC_OSZAPC
,)
5940 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
5941 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
5942 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
5943 X86_64_DEF([INDEX_op_imulq_EAX_T0
] = CC_OSZAPC
,)
5944 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
5945 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
5946 X86_64_DEF([INDEX_op_imulq_T0_T1
] = CC_OSZAPC
,)
5949 [INDEX_op_ucomiss
] = CC_OSZAPC
,
5950 [INDEX_op_ucomisd
] = CC_OSZAPC
,
5951 [INDEX_op_comiss
] = CC_OSZAPC
,
5952 [INDEX_op_comisd
] = CC_OSZAPC
,
5955 [INDEX_op_aam
] = CC_OSZAPC
,
5956 [INDEX_op_aad
] = CC_OSZAPC
,
5957 [INDEX_op_aas
] = CC_OSZAPC
,
5958 [INDEX_op_aaa
] = CC_OSZAPC
,
5959 [INDEX_op_das
] = CC_OSZAPC
,
5960 [INDEX_op_daa
] = CC_OSZAPC
,
5962 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
5963 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
5964 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
5965 [INDEX_op_movw_eflags_T0_io
] = CC_OSZAPC
,
5966 [INDEX_op_movl_eflags_T0_io
] = CC_OSZAPC
,
5967 [INDEX_op_movw_eflags_T0_cpl0
] = CC_OSZAPC
,
5968 [INDEX_op_movl_eflags_T0_cpl0
] = CC_OSZAPC
,
5969 [INDEX_op_clc
] = CC_C
,
5970 [INDEX_op_stc
] = CC_C
,
5971 [INDEX_op_cmc
] = CC_C
,
5973 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
5974 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
5975 X86_64_DEF([INDEX_op_btq_T0_T1_cc
] = CC_OSZAPC
,)
5976 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
5977 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
5978 X86_64_DEF([INDEX_op_btsq_T0_T1_cc
] = CC_OSZAPC
,)
5979 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
5980 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
5981 X86_64_DEF([INDEX_op_btrq_T0_T1_cc
] = CC_OSZAPC
,)
5982 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
5983 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
5984 X86_64_DEF([INDEX_op_btcq_T0_T1_cc
] = CC_OSZAPC
,)
5986 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
5987 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
5988 X86_64_DEF([INDEX_op_bsfq_T0_cc
] = CC_OSZAPC
,)
5989 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
5990 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
5991 X86_64_DEF([INDEX_op_bsrq_T0_cc
] = CC_OSZAPC
,)
5993 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
5994 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
5995 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
5996 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc
] = CC_OSZAPC
,)
5998 [INDEX_op_cmpxchg8b
] = CC_Z
,
5999 [INDEX_op_lar
] = CC_Z
,
6000 [INDEX_op_lsl
] = CC_Z
,
6001 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
6002 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
/* Table fragment for opc_write_flags[]: flags *produced* by the
   memory-variant arithmetic/shift ops.  adc/sbb and the shift family
   define the full OSZAPC set; the rotate family only defines O and C
   (the other flags are left untouched by x86 rotates).  SUFFIX selects
   the memory-access variant of each op, as for DEF_READF. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6069 #ifndef CONFIG_USER_ONLY
6075 /* simpler form of an operation if no flags need to be generated */
6076 static uint16_t opc_simpler
[NB_OPS
] = {
6077 [INDEX_op_update2_cc
] = INDEX_op_nop
,
6078 [INDEX_op_update1_cc
] = INDEX_op_nop
,
6079 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
6081 /* broken: CC_OP logic must be rewritten */
6082 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
6085 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
6086 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
6087 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
6088 X86_64_DEF([INDEX_op_shlq_T0_T1_cc
] = INDEX_op_shlq_T0_T1
,)
6090 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
6091 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
6092 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
6093 X86_64_DEF([INDEX_op_shrq_T0_T1_cc
] = INDEX_op_shrq_T0_T1
,)
6095 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
6096 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
6097 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
6098 X86_64_DEF([INDEX_op_sarq_T0_T1_cc
] = INDEX_op_sarq_T0_T1
,)
/* Table fragment for opc_simpler[]: maps each flag-computing rotate op
   to its flag-free twin (same rotate, no _cc suffix), so the flag
   optimizer can substitute the cheaper op when no written flag is live.
   SUFFIX selects the memory-access variant of each op. */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6113 #ifndef CONFIG_USER_ONLY
6114 DEF_SIMPLER(_kernel
)
6119 void optimize_flags_init(void)
6122 /* put default values in arrays */
6123 for(i
= 0; i
< NB_OPS
; i
++) {
6124 if (opc_simpler
[i
] == 0)
6129 /* CPU flags computation optimization: we move backward thru the
6130 generated code to see which flags are needed. The operation is
6131 modified if suitable */
6132 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
6135 int live_flags
, write_flags
, op
;
6137 opc_ptr
= opc_buf
+ opc_buf_len
;
6138 /* live_flags contains the flags needed by the next instructions
6139 in the code. At the end of the bloc, we consider that all the
6141 live_flags
= CC_OSZAPC
;
6142 while (opc_ptr
> opc_buf
) {
6144 /* if none of the flags written by the instruction is used,
6145 then we can try to find a simpler instruction */
6146 write_flags
= opc_write_flags
[op
];
6147 if ((live_flags
& write_flags
) == 0) {
6148 *opc_ptr
= opc_simpler
[op
];
6150 /* compute the live flags before the instruction */
6151 live_flags
&= ~write_flags
;
6152 live_flags
|= opc_read_flags
[op
];
6156 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6157 basic block 'tb'. If search_pc is TRUE, also generate PC
6158 information for each intermediate instruction. */
6159 static inline int gen_intermediate_code_internal(CPUState
*env
,
6160 TranslationBlock
*tb
,
6163 DisasContext dc1
, *dc
= &dc1
;
6164 target_ulong pc_ptr
;
6165 uint16_t *gen_opc_end
;
6166 int flags
, j
, lj
, cflags
;
6167 target_ulong pc_start
;
6168 target_ulong cs_base
;
6170 /* generate intermediate code */
6172 cs_base
= tb
->cs_base
;
6174 cflags
= tb
->cflags
;
6176 dc
->pe
= (flags
>> HF_PE_SHIFT
) & 1;
6177 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
6178 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
6179 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
6181 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
6182 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
6183 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
6184 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
6185 dc
->singlestep_enabled
= env
->singlestep_enabled
;
6186 dc
->cc_op
= CC_OP_DYNAMIC
;
6187 dc
->cs_base
= cs_base
;
6189 dc
->popl_esp_hack
= 0;
6190 /* select memory access functions */
6192 if (flags
& HF_SOFTMMU_MASK
) {
6194 dc
->mem_index
= 2 * 4;
6196 dc
->mem_index
= 1 * 4;
6198 dc
->cpuid_features
= env
->cpuid_features
;
6199 #ifdef TARGET_X86_64
6200 dc
->lma
= (flags
>> HF_LMA_SHIFT
) & 1;
6201 dc
->code64
= (flags
>> HF_CS64_SHIFT
) & 1;
6204 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
||
6205 (flags
& HF_INHIBIT_IRQ_MASK
)
6206 #ifndef CONFIG_SOFTMMU
6207 || (flags
& HF_SOFTMMU_MASK
)
6211 /* check addseg logic */
6212 if (!dc
->addseg
&& (dc
->vm86
|| !dc
->pe
|| !dc
->code32
))
6213 printf("ERROR addseg\n");
6216 gen_opc_ptr
= gen_opc_buf
;
6217 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
6218 gen_opparam_ptr
= gen_opparam_buf
;
6221 dc
->is_jmp
= DISAS_NEXT
;
6226 if (env
->nb_breakpoints
> 0) {
6227 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
6228 if (env
->breakpoints
[j
] == pc_ptr
) {
6229 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
6235 j
= gen_opc_ptr
- gen_opc_buf
;
6239 gen_opc_instr_start
[lj
++] = 0;
6241 gen_opc_pc
[lj
] = pc_ptr
;
6242 gen_opc_cc_op
[lj
] = dc
->cc_op
;
6243 gen_opc_instr_start
[lj
] = 1;
6245 pc_ptr
= disas_insn(dc
, pc_ptr
);
6246 /* stop translation if indicated */
6249 /* if single step mode, we generate only one instruction and
6250 generate an exception */
6251 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6252 the flag and abort the translation to give the irqs a
6253 change to be happen */
6254 if (dc
->tf
|| dc
->singlestep_enabled
||
6255 (flags
& HF_INHIBIT_IRQ_MASK
) ||
6256 (cflags
& CF_SINGLE_INSN
)) {
6257 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6261 /* if too long translation, stop generation too */
6262 if (gen_opc_ptr
>= gen_opc_end
||
6263 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
6264 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6269 *gen_opc_ptr
= INDEX_op_end
;
6270 /* we don't forget to fill the last values */
6272 j
= gen_opc_ptr
- gen_opc_buf
;
6275 gen_opc_instr_start
[lj
++] = 0;
6279 if (loglevel
& CPU_LOG_TB_CPU
) {
6280 cpu_dump_state(env
, logfile
, fprintf
, X86_DUMP_CCOP
);
6282 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
6284 fprintf(logfile
, "----------------\n");
6285 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
6286 #ifdef TARGET_X86_64
6291 disas_flags
= !dc
->code32
;
6292 target_disas(logfile
, pc_start
, pc_ptr
- pc_start
, disas_flags
);
6293 fprintf(logfile
, "\n");
6294 if (loglevel
& CPU_LOG_TB_OP
) {
6295 fprintf(logfile
, "OP:\n");
6296 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6297 fprintf(logfile
, "\n");
6302 /* optimize flag computations */
6303 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
6306 if (loglevel
& CPU_LOG_TB_OP_OPT
) {
6307 fprintf(logfile
, "AFTER FLAGS OPT:\n");
6308 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6309 fprintf(logfile
, "\n");
6313 tb
->size
= pc_ptr
- pc_start
;
6317 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
6319 return gen_intermediate_code_internal(env
, tb
, 0);
6322 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
6324 return gen_intermediate_code_internal(env
, tb
, 1);