4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr
;
34 static uint32_t *gen_opparam_ptr
;
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
61 static int x86_64_hregs
;
64 typedef struct DisasContext
{
65 /* current insn context */
66 int override
; /* -1 if no override */
69 target_ulong pc
; /* pc = eip + cs_base */
70 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
71 static state change (stop translation) */
72 /* current block context */
73 target_ulong cs_base
; /* base of CS segment */
74 int pe
; /* protected mode */
75 int code32
; /* 32 bit code segment */
77 int lma
; /* long mode active */
78 int code64
; /* 64 bit code segment */
81 int ss32
; /* 32 bit stack segment */
82 int cc_op
; /* current CC operation */
83 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
84 int f_st
; /* currently unused */
85 int vm86
; /* vm86 mode */
88 int tf
; /* TF cpu flag */
89 int singlestep_enabled
; /* "hardware" single step enabled */
90 int jmp_opt
; /* use direct block chaining for direct jumps */
91 int mem_index
; /* select memory access functions */
92 int flags
; /* all execution flags */
93 struct TranslationBlock
*tb
;
94 int popl_esp_hack
; /* for correct popl with esp base handling */
95 int rip_offset
; /* only used in x86_64, but left for simplicity */
99 static void gen_eob(DisasContext
*s
);
100 static void gen_jmp(DisasContext
*s
, target_ulong eip
);
101 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
);
103 /* i386 arith/logic operations */
123 OP_SHL1
, /* undocumented */
128 #define DEF(s, n, copy_size) INDEX_op_ ## s,
145 /* I386 int registers */
146 OR_EAX
, /* MUST be even numbered */
155 OR_TMP0
= 16, /* temporary operand register */
157 OR_A0
, /* temporary register used when doing address evaluation */
162 #define NB_OP_SIZES 4
164 #define DEF_REGS(prefix, suffix) \
165 prefix ## EAX ## suffix,\
166 prefix ## ECX ## suffix,\
167 prefix ## EDX ## suffix,\
168 prefix ## EBX ## suffix,\
169 prefix ## ESP ## suffix,\
170 prefix ## EBP ## suffix,\
171 prefix ## ESI ## suffix,\
172 prefix ## EDI ## suffix,\
173 prefix ## R8 ## suffix,\
174 prefix ## R9 ## suffix,\
175 prefix ## R10 ## suffix,\
176 prefix ## R11 ## suffix,\
177 prefix ## R12 ## suffix,\
178 prefix ## R13 ## suffix,\
179 prefix ## R14 ## suffix,\
180 prefix ## R15 ## suffix,
182 #define DEF_BREGS(prefixb, prefixh, suffix) \
184 static void prefixb ## ESP ## suffix ## _wrapper(void) \
187 prefixb ## ESP ## suffix (); \
189 prefixh ## EAX ## suffix (); \
192 static void prefixb ## EBP ## suffix ## _wrapper(void) \
195 prefixb ## EBP ## suffix (); \
197 prefixh ## ECX ## suffix (); \
200 static void prefixb ## ESI ## suffix ## _wrapper(void) \
203 prefixb ## ESI ## suffix (); \
205 prefixh ## EDX ## suffix (); \
208 static void prefixb ## EDI ## suffix ## _wrapper(void) \
211 prefixb ## EDI ## suffix (); \
213 prefixh ## EBX ## suffix (); \
216 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T0
)
217 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T1
)
218 DEF_BREGS(gen_op_movl_T0_
, gen_op_movh_T0_
, )
219 DEF_BREGS(gen_op_movl_T1_
, gen_op_movh_T1_
, )
221 #else /* !TARGET_X86_64 */
223 #define NB_OP_SIZES 3
225 #define DEF_REGS(prefix, suffix) \
226 prefix ## EAX ## suffix,\
227 prefix ## ECX ## suffix,\
228 prefix ## EDX ## suffix,\
229 prefix ## EBX ## suffix,\
230 prefix ## ESP ## suffix,\
231 prefix ## EBP ## suffix,\
232 prefix ## ESI ## suffix,\
233 prefix ## EDI ## suffix,
235 #endif /* !TARGET_X86_64 */
237 static GenOpFunc
*gen_op_mov_reg_T0
[NB_OP_SIZES
][CPU_NB_REGS
] = {
244 gen_op_movb_ESP_T0_wrapper
,
245 gen_op_movb_EBP_T0_wrapper
,
246 gen_op_movb_ESI_T0_wrapper
,
247 gen_op_movb_EDI_T0_wrapper
,
264 DEF_REGS(gen_op_movw_
, _T0
)
267 DEF_REGS(gen_op_movl_
, _T0
)
271 DEF_REGS(gen_op_movq_
, _T0
)
276 static GenOpFunc
*gen_op_mov_reg_T1
[NB_OP_SIZES
][CPU_NB_REGS
] = {
283 gen_op_movb_ESP_T1_wrapper
,
284 gen_op_movb_EBP_T1_wrapper
,
285 gen_op_movb_ESI_T1_wrapper
,
286 gen_op_movb_EDI_T1_wrapper
,
303 DEF_REGS(gen_op_movw_
, _T1
)
306 DEF_REGS(gen_op_movl_
, _T1
)
310 DEF_REGS(gen_op_movq_
, _T1
)
315 static GenOpFunc
*gen_op_mov_reg_A0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
317 DEF_REGS(gen_op_movw_
, _A0
)
320 DEF_REGS(gen_op_movl_
, _A0
)
324 DEF_REGS(gen_op_movq_
, _A0
)
329 static GenOpFunc
*gen_op_mov_TN_reg
[NB_OP_SIZES
][2][CPU_NB_REGS
] =
338 gen_op_movl_T0_ESP_wrapper
,
339 gen_op_movl_T0_EBP_wrapper
,
340 gen_op_movl_T0_ESI_wrapper
,
341 gen_op_movl_T0_EDI_wrapper
,
363 gen_op_movl_T1_ESP_wrapper
,
364 gen_op_movl_T1_EBP_wrapper
,
365 gen_op_movl_T1_ESI_wrapper
,
366 gen_op_movl_T1_EDI_wrapper
,
385 DEF_REGS(gen_op_movl_T0_
, )
388 DEF_REGS(gen_op_movl_T1_
, )
393 DEF_REGS(gen_op_movl_T0_
, )
396 DEF_REGS(gen_op_movl_T1_
, )
402 DEF_REGS(gen_op_movl_T0_
, )
405 DEF_REGS(gen_op_movl_T1_
, )
411 static GenOpFunc
*gen_op_movl_A0_reg
[CPU_NB_REGS
] = {
412 DEF_REGS(gen_op_movl_A0_
, )
415 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][CPU_NB_REGS
] = {
417 DEF_REGS(gen_op_addl_A0_
, )
420 DEF_REGS(gen_op_addl_A0_
, _s1
)
423 DEF_REGS(gen_op_addl_A0_
, _s2
)
426 DEF_REGS(gen_op_addl_A0_
, _s3
)
431 static GenOpFunc
*gen_op_movq_A0_reg
[CPU_NB_REGS
] = {
432 DEF_REGS(gen_op_movq_A0_
, )
435 static GenOpFunc
*gen_op_addq_A0_reg_sN
[4][CPU_NB_REGS
] = {
437 DEF_REGS(gen_op_addq_A0_
, )
440 DEF_REGS(gen_op_addq_A0_
, _s1
)
443 DEF_REGS(gen_op_addq_A0_
, _s2
)
446 DEF_REGS(gen_op_addq_A0_
, _s3
)
451 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
453 DEF_REGS(gen_op_cmovw_
, _T1_T0
)
456 DEF_REGS(gen_op_cmovl_
, _T1_T0
)
460 DEF_REGS(gen_op_cmovq_
, _T1_T0
)
465 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
476 #define DEF_ARITHC(SUFFIX)\
478 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
479 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
482 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
483 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
486 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
487 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
490 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
491 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
494 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[4][2] = {
498 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3 * 4][2] = {
500 #ifndef CONFIG_USER_ONLY
506 static const int cc_op_arithb
[8] = {
517 #define DEF_CMPXCHG(SUFFIX)\
518 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
519 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
520 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
521 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
523 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[4] = {
527 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3 * 4] = {
529 #ifndef CONFIG_USER_ONLY
535 #define DEF_SHIFT(SUFFIX)\
537 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
538 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
539 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
540 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
541 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
542 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
543 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
548 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
549 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
550 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
551 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
552 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
553 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
558 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
559 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
560 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
561 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
562 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
563 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
567 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
568 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
569 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
570 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
571 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
572 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
573 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
577 static GenOpFunc
*gen_op_shift_T0_T1_cc
[4][8] = {
581 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3 * 4][8] = {
583 #ifndef CONFIG_USER_ONLY
589 #define DEF_SHIFTD(SUFFIX, op)\
595 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
596 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
599 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
600 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[4][2] = {
609 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[4][2] = {
613 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[3 * 4][2] = {
615 #ifndef CONFIG_USER_ONLY
616 DEF_SHIFTD(_kernel
, im
)
617 DEF_SHIFTD(_user
, im
)
621 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[3 * 4][2] = {
622 DEF_SHIFTD(_raw
, ECX
)
623 #ifndef CONFIG_USER_ONLY
624 DEF_SHIFTD(_kernel
, ECX
)
625 DEF_SHIFTD(_user
, ECX
)
629 static GenOpFunc
*gen_op_btx_T0_T1_cc
[3][4] = {
632 gen_op_btsw_T0_T1_cc
,
633 gen_op_btrw_T0_T1_cc
,
634 gen_op_btcw_T0_T1_cc
,
638 gen_op_btsl_T0_T1_cc
,
639 gen_op_btrl_T0_T1_cc
,
640 gen_op_btcl_T0_T1_cc
,
645 gen_op_btsq_T0_T1_cc
,
646 gen_op_btrq_T0_T1_cc
,
647 gen_op_btcq_T0_T1_cc
,
652 static GenOpFunc
*gen_op_add_bit_A0_T1
[3] = {
653 gen_op_add_bitw_A0_T1
,
654 gen_op_add_bitl_A0_T1
,
655 X86_64_ONLY(gen_op_add_bitq_A0_T1
),
658 static GenOpFunc
*gen_op_bsx_T0_cc
[3][2] = {
675 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 4] = {
676 gen_op_ldsb_raw_T0_A0
,
677 gen_op_ldsw_raw_T0_A0
,
678 X86_64_ONLY(gen_op_ldsl_raw_T0_A0
),
680 #ifndef CONFIG_USER_ONLY
681 gen_op_ldsb_kernel_T0_A0
,
682 gen_op_ldsw_kernel_T0_A0
,
683 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0
),
686 gen_op_ldsb_user_T0_A0
,
687 gen_op_ldsw_user_T0_A0
,
688 X86_64_ONLY(gen_op_ldsl_user_T0_A0
),
693 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 4] = {
694 gen_op_ldub_raw_T0_A0
,
695 gen_op_lduw_raw_T0_A0
,
699 #ifndef CONFIG_USER_ONLY
700 gen_op_ldub_kernel_T0_A0
,
701 gen_op_lduw_kernel_T0_A0
,
705 gen_op_ldub_user_T0_A0
,
706 gen_op_lduw_user_T0_A0
,
712 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
713 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 4] = {
714 gen_op_ldub_raw_T0_A0
,
715 gen_op_lduw_raw_T0_A0
,
716 gen_op_ldl_raw_T0_A0
,
717 X86_64_ONLY(gen_op_ldq_raw_T0_A0
),
719 #ifndef CONFIG_USER_ONLY
720 gen_op_ldub_kernel_T0_A0
,
721 gen_op_lduw_kernel_T0_A0
,
722 gen_op_ldl_kernel_T0_A0
,
723 X86_64_ONLY(gen_op_ldq_kernel_T0_A0
),
725 gen_op_ldub_user_T0_A0
,
726 gen_op_lduw_user_T0_A0
,
727 gen_op_ldl_user_T0_A0
,
728 X86_64_ONLY(gen_op_ldq_user_T0_A0
),
732 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 4] = {
733 gen_op_ldub_raw_T1_A0
,
734 gen_op_lduw_raw_T1_A0
,
735 gen_op_ldl_raw_T1_A0
,
736 X86_64_ONLY(gen_op_ldq_raw_T1_A0
),
738 #ifndef CONFIG_USER_ONLY
739 gen_op_ldub_kernel_T1_A0
,
740 gen_op_lduw_kernel_T1_A0
,
741 gen_op_ldl_kernel_T1_A0
,
742 X86_64_ONLY(gen_op_ldq_kernel_T1_A0
),
744 gen_op_ldub_user_T1_A0
,
745 gen_op_lduw_user_T1_A0
,
746 gen_op_ldl_user_T1_A0
,
747 X86_64_ONLY(gen_op_ldq_user_T1_A0
),
751 static GenOpFunc
*gen_op_st_T0_A0
[3 * 4] = {
752 gen_op_stb_raw_T0_A0
,
753 gen_op_stw_raw_T0_A0
,
754 gen_op_stl_raw_T0_A0
,
755 X86_64_ONLY(gen_op_stq_raw_T0_A0
),
757 #ifndef CONFIG_USER_ONLY
758 gen_op_stb_kernel_T0_A0
,
759 gen_op_stw_kernel_T0_A0
,
760 gen_op_stl_kernel_T0_A0
,
761 X86_64_ONLY(gen_op_stq_kernel_T0_A0
),
763 gen_op_stb_user_T0_A0
,
764 gen_op_stw_user_T0_A0
,
765 gen_op_stl_user_T0_A0
,
766 X86_64_ONLY(gen_op_stq_user_T0_A0
),
770 static GenOpFunc
*gen_op_st_T1_A0
[3 * 4] = {
772 gen_op_stw_raw_T1_A0
,
773 gen_op_stl_raw_T1_A0
,
774 X86_64_ONLY(gen_op_stq_raw_T1_A0
),
776 #ifndef CONFIG_USER_ONLY
778 gen_op_stw_kernel_T1_A0
,
779 gen_op_stl_kernel_T1_A0
,
780 X86_64_ONLY(gen_op_stq_kernel_T1_A0
),
783 gen_op_stw_user_T1_A0
,
784 gen_op_stl_user_T1_A0
,
785 X86_64_ONLY(gen_op_stq_user_T1_A0
),
/* Emit ops that load the constant target PC into EIP/RIP.
   Picks the smallest immediate form that can represent 'pc':
   zero-extended 32-bit, sign-extended 32-bit, or full 64-bit split
   into high/low halves.
   NOTE(review): interior lines (the #ifdef TARGET_X86_64 / else
   structure) are missing from this extraction — branch nesting is
   assumed; confirm against the full source file. */
789 static inline void gen_jmp_im(target_ulong pc
)
/* pc fits in 32 bits zero-extended: plain 32-bit EIP move. */
792 if (pc
== (uint32_t)pc
) {
793 gen_op_movl_eip_im(pc
);
/* pc fits sign-extended in 32 bits: 64-bit move with 32-bit imm. */
794 } else if (pc
== (int32_t)pc
) {
795 gen_op_movq_eip_im(pc
);
/* Full 64-bit immediate: pass high 32 bits and low word separately. */
797 gen_op_movq_eip_im64(pc
>> 32, pc
);
/* Non-x86_64 path (presumably): 32-bit EIP move only. */
800 gen_op_movl_eip_im(pc
);
804 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
808 override
= s
->override
;
812 gen_op_movq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
813 gen_op_addq_A0_reg_sN
[0][R_ESI
]();
815 gen_op_movq_A0_reg
[R_ESI
]();
821 if (s
->addseg
&& override
< 0)
824 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
825 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
827 gen_op_movl_A0_reg
[R_ESI
]();
830 /* 16 address, always override */
833 gen_op_movl_A0_reg
[R_ESI
]();
834 gen_op_andl_A0_ffff();
835 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
839 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
843 gen_op_movq_A0_reg
[R_EDI
]();
848 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
849 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
851 gen_op_movl_A0_reg
[R_EDI
]();
854 gen_op_movl_A0_reg
[R_EDI
]();
855 gen_op_andl_A0_ffff();
856 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
860 static GenOpFunc
*gen_op_movl_T0_Dshift
[4] = {
861 gen_op_movl_T0_Dshiftb
,
862 gen_op_movl_T0_Dshiftw
,
863 gen_op_movl_T0_Dshiftl
,
864 X86_64_ONLY(gen_op_movl_T0_Dshiftq
),
867 static GenOpFunc1
*gen_op_jnz_ecx
[3] = {
870 X86_64_ONLY(gen_op_jnz_ecxq
),
873 static GenOpFunc1
*gen_op_jz_ecx
[3] = {
876 X86_64_ONLY(gen_op_jz_ecxq
),
879 static GenOpFunc
*gen_op_dec_ECX
[3] = {
882 X86_64_ONLY(gen_op_decq_ECX
),
885 static GenOpFunc1
*gen_op_string_jnz_sub
[2][4] = {
890 X86_64_ONLY(gen_op_jnz_subq
),
896 X86_64_ONLY(gen_op_jz_subq
),
900 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
906 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
912 static GenOpFunc
*gen_op_in
[3] = {
918 static GenOpFunc
*gen_op_out
[3] = {
924 static GenOpFunc
*gen_check_io_T0
[3] = {
930 static GenOpFunc
*gen_check_io_DX
[3] = {
/* Emit a run-time I/O permission check before IN/OUT/INS/OUTS.
   In protected mode, when CPL > IOPL or in vm86 mode, port access
   must be validated against the TSS I/O permission bitmap; the
   generated helper performs that check and raises #GP on failure.
   use_dx selects checking the port in DX vs. the port in T0.
   NOTE(review): interior lines (gen_jmp_im call, use_dx branch
   structure) are missing from this extraction — confirm against the
   full source file. */
936 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, target_ulong cur_eip
)
/* Only privileged/vm86 situations require the bitmap check. */
938 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
/* Flush lazily-tracked condition codes before calling a helper
   that may raise an exception. */
939 if (s
->cc_op
!= CC_OP_DYNAMIC
)
940 gen_op_set_cc_op(s
->cc_op
);
/* Port number taken from DX. */
943 gen_check_io_DX
[ot
]();
/* Port number taken from T0 (immediate-port forms). */
945 gen_check_io_T0
[ot
]();
/* Emit one MOVS iteration: load from DS:(E)SI, store to ES:(E)DI,
   then advance both index registers by +/- operand size (Dshift
   encodes the size, with sign depending on the direction flag DF).
   NOTE(review): the #ifdef TARGET_X86_64 / aflag branch lines around
   the addq/addl/addw variants are missing from this extraction. */
949 static inline void gen_movs(DisasContext
*s
, int ot
)
/* A0 = source address (segment-adjusted ESI). */
951 gen_string_movl_A0_ESI(s
);
952 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* A0 = destination address (ES:EDI). */
953 gen_string_movl_A0_EDI(s
);
954 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = signed increment (+size or -size per DF). */
955 gen_op_movl_T0_Dshift
[ot
]();
/* 64-bit address size: advance RSI/RDI. */
958 gen_op_addq_ESI_T0();
959 gen_op_addq_EDI_T0();
/* 32-bit address size. */
963 gen_op_addl_ESI_T0();
964 gen_op_addl_EDI_T0();
/* 16-bit address size. */
966 gen_op_addw_ESI_T0();
967 gen_op_addw_EDI_T0();
/* Flush the statically-known condition-code state into the CPU and
   mark it dynamic, so subsequently generated code cannot rely on a
   stale compile-time cc_op value. */
971 static inline void gen_update_cc_op(DisasContext
*s
)
973 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
974 gen_op_set_cc_op(s
->cc_op
);
975 s
->cc_op
= CC_OP_DYNAMIC
;
979 /* XXX: does not work with gdbstub "ice" single step - not a
981 static int gen_jz_ecx_string(DisasContext
*s
, target_ulong next_eip
)
985 l1
= gen_new_label();
986 l2
= gen_new_label();
987 gen_op_jnz_ecx
[s
->aflag
](l1
);
989 gen_jmp_tb(s
, next_eip
, 1);
/* Emit one STOS iteration: store (E)AX to ES:(E)DI, then advance
   EDI by the signed operand size (direction-flag aware Dshift).
   NOTE(review): the else/#endif lines separating the aflag cases are
   missing from this extraction. */
994 static inline void gen_stos(DisasContext
*s
, int ot
)
/* T0 = EAX (full 32-bit read; the store truncates to 'ot'). */
996 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
997 gen_string_movl_A0_EDI(s
);
998 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = signed increment per DF. */
999 gen_op_movl_T0_Dshift
[ot
]();
1000 #ifdef TARGET_X86_64
/* aflag == 2 means 64-bit address size. */
1001 if (s
->aflag
== 2) {
1002 gen_op_addq_EDI_T0();
1006 gen_op_addl_EDI_T0();
1008 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load from DS:(E)SI into (E)AX, then
   advance ESI by the signed operand size (Dshift).
   NOTE(review): the else/#endif lines separating the aflag cases are
   missing from this extraction. */
1012 static inline void gen_lods(DisasContext
*s
, int ot
)
1014 gen_string_movl_A0_ESI(s
);
1015 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* Write the loaded value into EAX at width 'ot'. */
1016 gen_op_mov_reg_T0
[ot
][R_EAX
]();
1017 gen_op_movl_T0_Dshift
[ot
]();
1018 #ifdef TARGET_X86_64
/* 64-bit address size. */
1019 if (s
->aflag
== 2) {
1020 gen_op_addq_ESI_T0();
1024 gen_op_addl_ESI_T0();
1026 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: compare (E)AX with the value at
   ES:(E)DI (sets flags via the sub compare), then advance EDI.
   NOTE(review): the else/#endif lines separating the aflag cases are
   missing from this extraction. */
1030 static inline void gen_scas(DisasContext
*s
, int ot
)
/* T0 = EAX, T1 = memory operand; flags come from T0 - T1. */
1032 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1033 gen_string_movl_A0_EDI(s
);
1034 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1035 gen_op_cmpl_T0_T1_cc();
1036 gen_op_movl_T0_Dshift
[ot
]();
1037 #ifdef TARGET_X86_64
/* 64-bit address size. */
1038 if (s
->aflag
== 2) {
1039 gen_op_addq_EDI_T0();
1043 gen_op_addl_EDI_T0();
1045 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: compare [DS:(E)SI] with [ES:(E)DI]
   (flags from the subtraction), then advance both index registers.
   NOTE(review): the else/#endif lines separating the aflag cases are
   missing from this extraction. */
1049 static inline void gen_cmps(DisasContext
*s
, int ot
)
/* T0 = source operand. */
1051 gen_string_movl_A0_ESI(s
);
1052 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* T1 = destination operand. */
1053 gen_string_movl_A0_EDI(s
);
1054 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1055 gen_op_cmpl_T0_T1_cc();
1056 gen_op_movl_T0_Dshift
[ot
]();
1057 #ifdef TARGET_X86_64
/* 64-bit address size. */
1058 if (s
->aflag
== 2) {
1059 gen_op_addq_ESI_T0();
1060 gen_op_addq_EDI_T0();
1064 gen_op_addl_ESI_T0();
1065 gen_op_addl_EDI_T0();
1067 gen_op_addw_ESI_T0();
1068 gen_op_addw_EDI_T0();
/* Emit one INS iteration: read from port DX, store to ES:(E)DI,
   then advance EDI.
   NOTE(review): lines are missing here — the first st_T0_A0 before
   the port read looks like it writes a dummy value so a page fault
   is taken before the (side-effecting) I/O read; confirm against the
   full source. The else/#endif aflag lines are also missing. */
1072 static inline void gen_ins(DisasContext
*s
, int ot
)
1074 gen_string_movl_A0_EDI(s
);
/* Pre-store to fault on an unwritable destination before doing
   the I/O read (presumed — see note above). */
1076 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = value read from port DX. */
1077 gen_op_in_DX_T0
[ot
]();
1078 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1079 gen_op_movl_T0_Dshift
[ot
]();
1080 #ifdef TARGET_X86_64
/* 64-bit address size. */
1081 if (s
->aflag
== 2) {
1082 gen_op_addq_EDI_T0();
1086 gen_op_addl_EDI_T0();
1088 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: load from DS:(E)SI, write to port DX,
   then advance ESI.
   NOTE(review): the else/#endif lines separating the aflag cases are
   missing from this extraction. */
1092 static inline void gen_outs(DisasContext
*s
, int ot
)
1094 gen_string_movl_A0_ESI(s
);
1095 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* Write T0 to the port number held in DX. */
1096 gen_op_out_DX_T0
[ot
]();
1097 gen_op_movl_T0_Dshift
[ot
]();
1098 #ifdef TARGET_X86_64
/* 64-bit address size. */
1099 if (s
->aflag
== 2) {
1100 gen_op_addq_ESI_T0();
1104 gen_op_addl_ESI_T0();
1106 gen_op_addw_ESI_T0();
1110 /* same method as Valgrind : we generate jumps to current or next
1112 #define GEN_REPZ(op) \
1113 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1114 target_ulong cur_eip, target_ulong next_eip) \
1117 gen_update_cc_op(s); \
1118 l2 = gen_jz_ecx_string(s, next_eip); \
1119 gen_ ## op(s, ot); \
1120 gen_op_dec_ECX[s->aflag](); \
1121 /* a loop would cause two single step exceptions if ECX = 1 \
1122 before rep string_insn */ \
1124 gen_op_jz_ecx[s->aflag](l2); \
1125 gen_jmp(s, cur_eip); \
1128 #define GEN_REPZ2(op) \
1129 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1130 target_ulong cur_eip, \
1131 target_ulong next_eip, \
1135 gen_update_cc_op(s); \
1136 l2 = gen_jz_ecx_string(s, next_eip); \
1137 gen_ ## op(s, ot); \
1138 gen_op_dec_ECX[s->aflag](); \
1139 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1140 gen_op_string_jnz_sub[nz][ot](l2);\
1142 gen_op_jz_ecx[s->aflag](l2); \
1143 gen_jmp(s, cur_eip); \
1165 static GenOpFunc1
*gen_jcc_sub
[4][8] = {
1196 #ifdef TARGET_X86_64
1199 BUGGY_64(gen_op_jb_subq
),
1201 BUGGY_64(gen_op_jbe_subq
),
1204 BUGGY_64(gen_op_jl_subq
),
1205 BUGGY_64(gen_op_jle_subq
),
1209 static GenOpFunc1
*gen_op_loop
[3][4] = {
1220 #ifdef TARGET_X86_64
1229 static GenOpFunc
*gen_setcc_slow
[8] = {
1240 static GenOpFunc
*gen_setcc_sub
[4][8] = {
1243 gen_op_setb_T0_subb
,
1244 gen_op_setz_T0_subb
,
1245 gen_op_setbe_T0_subb
,
1246 gen_op_sets_T0_subb
,
1248 gen_op_setl_T0_subb
,
1249 gen_op_setle_T0_subb
,
1253 gen_op_setb_T0_subw
,
1254 gen_op_setz_T0_subw
,
1255 gen_op_setbe_T0_subw
,
1256 gen_op_sets_T0_subw
,
1258 gen_op_setl_T0_subw
,
1259 gen_op_setle_T0_subw
,
1263 gen_op_setb_T0_subl
,
1264 gen_op_setz_T0_subl
,
1265 gen_op_setbe_T0_subl
,
1266 gen_op_sets_T0_subl
,
1268 gen_op_setl_T0_subl
,
1269 gen_op_setle_T0_subl
,
1271 #ifdef TARGET_X86_64
1274 gen_op_setb_T0_subq
,
1275 gen_op_setz_T0_subq
,
1276 gen_op_setbe_T0_subq
,
1277 gen_op_sets_T0_subq
,
1279 gen_op_setl_T0_subq
,
1280 gen_op_setle_T0_subq
,
1285 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1286 gen_op_fadd_ST0_FT0
,
1287 gen_op_fmul_ST0_FT0
,
1288 gen_op_fcom_ST0_FT0
,
1289 gen_op_fcom_ST0_FT0
,
1290 gen_op_fsub_ST0_FT0
,
1291 gen_op_fsubr_ST0_FT0
,
1292 gen_op_fdiv_ST0_FT0
,
1293 gen_op_fdivr_ST0_FT0
,
1296 /* NOTE the exception in "r" op ordering */
1297 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1298 gen_op_fadd_STN_ST0
,
1299 gen_op_fmul_STN_ST0
,
1302 gen_op_fsubr_STN_ST0
,
1303 gen_op_fsub_STN_ST0
,
1304 gen_op_fdivr_STN_ST0
,
1305 gen_op_fdiv_STN_ST0
,
1308 /* if d == OR_TMP0, it means memory operand (address in A0) */
1309 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1311 GenOpFunc
*gen_update_cc
;
1314 gen_op_mov_TN_reg
[ot
][0][d
]();
1316 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1321 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1322 gen_op_set_cc_op(s1
->cc_op
);
1324 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1325 gen_op_mov_reg_T0
[ot
][d
]();
1327 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1329 s1
->cc_op
= CC_OP_DYNAMIC
;
1332 gen_op_addl_T0_T1();
1333 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1334 gen_update_cc
= gen_op_update2_cc
;
1337 gen_op_subl_T0_T1();
1338 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1339 gen_update_cc
= gen_op_update2_cc
;
1345 gen_op_arith_T0_T1_cc
[op
]();
1346 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1347 gen_update_cc
= gen_op_update1_cc
;
1350 gen_op_cmpl_T0_T1_cc();
1351 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1352 gen_update_cc
= NULL
;
1355 if (op
!= OP_CMPL
) {
1357 gen_op_mov_reg_T0
[ot
][d
]();
1359 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1361 /* the flags update must happen after the memory write (precise
1362 exception support) */
1368 /* if d == OR_TMP0, it means memory operand (address in A0) */
1369 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1372 gen_op_mov_TN_reg
[ot
][0][d
]();
1374 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1375 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1376 gen_op_set_cc_op(s1
->cc_op
);
1379 s1
->cc_op
= CC_OP_INCB
+ ot
;
1382 s1
->cc_op
= CC_OP_DECB
+ ot
;
1385 gen_op_mov_reg_T0
[ot
][d
]();
1387 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1388 gen_op_update_inc_cc();
1391 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1394 gen_op_mov_TN_reg
[ot
][0][d
]();
1396 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1398 gen_op_mov_TN_reg
[ot
][1][s
]();
1399 /* for zero counts, flags are not updated, so must do it dynamically */
1400 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1401 gen_op_set_cc_op(s1
->cc_op
);
1404 gen_op_shift_T0_T1_cc
[ot
][op
]();
1406 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
1408 gen_op_mov_reg_T0
[ot
][d
]();
1409 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
/* Emit a shift/rotate by immediate count 'c': materialize the count
   into T1 (via OR_TMP1) and reuse the general variable-count
   gen_shift() path rather than a dedicated immediate op. */
1412 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1414 /* currently not optimized */
1415 gen_op_movl_T1_im(c
);
1416 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1419 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1427 int mod
, rm
, code
, override
, must_add_seg
;
1429 override
= s
->override
;
1430 must_add_seg
= s
->addseg
;
1433 mod
= (modrm
>> 6) & 3;
1445 code
= ldub_code(s
->pc
++);
1446 scale
= (code
>> 6) & 3;
1447 index
= ((code
>> 3) & 7) | REX_X(s
);
1454 if ((base
& 7) == 5) {
1456 disp
= (int32_t)ldl_code(s
->pc
);
1458 if (CODE64(s
) && !havesib
) {
1459 disp
+= s
->pc
+ s
->rip_offset
;
1466 disp
= (int8_t)ldub_code(s
->pc
++);
1470 disp
= ldl_code(s
->pc
);
1476 /* for correct popl handling with esp */
1477 if (base
== 4 && s
->popl_esp_hack
)
1478 disp
+= s
->popl_esp_hack
;
1479 #ifdef TARGET_X86_64
1480 if (s
->aflag
== 2) {
1481 gen_op_movq_A0_reg
[base
]();
1483 if ((int32_t)disp
== disp
)
1484 gen_op_addq_A0_im(disp
);
1486 gen_op_addq_A0_im64(disp
>> 32, disp
);
1491 gen_op_movl_A0_reg
[base
]();
1493 gen_op_addl_A0_im(disp
);
1496 #ifdef TARGET_X86_64
1497 if (s
->aflag
== 2) {
1498 if ((int32_t)disp
== disp
)
1499 gen_op_movq_A0_im(disp
);
1501 gen_op_movq_A0_im64(disp
>> 32, disp
);
1505 gen_op_movl_A0_im(disp
);
1508 /* XXX: index == 4 is always invalid */
1509 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1510 #ifdef TARGET_X86_64
1511 if (s
->aflag
== 2) {
1512 gen_op_addq_A0_reg_sN
[scale
][index
]();
1516 gen_op_addl_A0_reg_sN
[scale
][index
]();
1521 if (base
== R_EBP
|| base
== R_ESP
)
1526 #ifdef TARGET_X86_64
1527 if (s
->aflag
== 2) {
1528 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1532 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1539 disp
= lduw_code(s
->pc
);
1541 gen_op_movl_A0_im(disp
);
1542 rm
= 0; /* avoid SS override */
1549 disp
= (int8_t)ldub_code(s
->pc
++);
1553 disp
= lduw_code(s
->pc
);
1559 gen_op_movl_A0_reg
[R_EBX
]();
1560 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1563 gen_op_movl_A0_reg
[R_EBX
]();
1564 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1567 gen_op_movl_A0_reg
[R_EBP
]();
1568 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1571 gen_op_movl_A0_reg
[R_EBP
]();
1572 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1575 gen_op_movl_A0_reg
[R_ESI
]();
1578 gen_op_movl_A0_reg
[R_EDI
]();
1581 gen_op_movl_A0_reg
[R_EBP
]();
1585 gen_op_movl_A0_reg
[R_EBX
]();
1589 gen_op_addl_A0_im(disp
);
1590 gen_op_andl_A0_ffff();
1594 if (rm
== 2 || rm
== 3 || rm
== 6)
1599 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1609 /* used for LEA and MOV AX, mem */
1610 static void gen_add_A0_ds_seg(DisasContext
*s
)
1612 int override
, must_add_seg
;
1613 must_add_seg
= s
->addseg
;
1615 if (s
->override
>= 0) {
1616 override
= s
->override
;
1622 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1626 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1628 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1630 int mod
, rm
, opreg
, disp
;
1632 mod
= (modrm
>> 6) & 3;
1633 rm
= (modrm
& 7) | REX_B(s
);
1637 gen_op_mov_TN_reg
[ot
][0][reg
]();
1638 gen_op_mov_reg_T0
[ot
][rm
]();
1640 gen_op_mov_TN_reg
[ot
][0][rm
]();
1642 gen_op_mov_reg_T0
[ot
][reg
]();
1645 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1648 gen_op_mov_TN_reg
[ot
][0][reg
]();
1649 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1651 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1653 gen_op_mov_reg_T0
[ot
][reg
]();
1658 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1664 ret
= ldub_code(s
->pc
);
1668 ret
= lduw_code(s
->pc
);
1673 ret
= ldl_code(s
->pc
);
1680 static inline int insn_const_size(unsigned int ot
)
1688 static inline void gen_jcc(DisasContext
*s
, int b
,
1689 target_ulong val
, target_ulong next_eip
)
1691 TranslationBlock
*tb
;
1698 jcc_op
= (b
>> 1) & 7;
1702 /* we optimize the cmp/jcc case */
1707 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1710 /* some jumps are easy to compute */
1752 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1755 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1767 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1768 gen_op_set_cc_op(s
->cc_op
);
1771 gen_setcc_slow
[jcc_op
]();
1772 func
= gen_op_jnz_T0_label
;
1782 l1
= gen_new_label();
1786 gen_jmp_im(next_eip
);
1787 gen_op_movl_T0_im((long)tb
+ 0);
1793 gen_op_movl_T0_im((long)tb
+ 1);
1799 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1800 gen_op_set_cc_op(s
->cc_op
);
1801 s
->cc_op
= CC_OP_DYNAMIC
;
1803 gen_setcc_slow
[jcc_op
]();
1809 l1
= gen_new_label();
1810 l2
= gen_new_label();
1811 gen_op_jnz_T0_label(l1
);
1812 gen_jmp_im(next_eip
);
1813 gen_op_jmp_label(l2
);
1821 static void gen_setcc(DisasContext
*s
, int b
)
1827 jcc_op
= (b
>> 1) & 7;
1829 /* we optimize the cmp/jcc case */
1834 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1839 /* some jumps are easy to compute */
1866 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1869 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1877 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1878 gen_op_set_cc_op(s
->cc_op
);
1879 func
= gen_setcc_slow
[jcc_op
];
1888 /* move T0 to seg_reg and compute if the CPU state may change. Never
1889 call this function with seg_reg == R_CS */
1890 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
1892 if (s
->pe
&& !s
->vm86
) {
1893 /* XXX: optimize by finding processor state dynamically */
1894 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1895 gen_op_set_cc_op(s
->cc_op
);
1896 gen_jmp_im(cur_eip
);
1897 gen_op_movl_seg_T0(seg_reg
);
1898 /* abort translation because the addseg value may change or
1899 because ss32 may change. For R_SS, translation must always
1900 stop as a special handling must be done to disable hardware
1901 interrupts for the next instruction */
1902 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
1905 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1906 if (seg_reg
== R_SS
)
1911 static inline void gen_stack_update(DisasContext
*s
, int addend
)
1913 #ifdef TARGET_X86_64
1916 gen_op_addq_ESP_8();
1918 gen_op_addq_ESP_im(addend
);
1923 gen_op_addl_ESP_2();
1924 else if (addend
== 4)
1925 gen_op_addl_ESP_4();
1927 gen_op_addl_ESP_im(addend
);
1930 gen_op_addw_ESP_2();
1931 else if (addend
== 4)
1932 gen_op_addw_ESP_4();
1934 gen_op_addw_ESP_im(addend
);
1938 /* generate a push. It depends on ss32, addseg and dflag */
1939 static void gen_push_T0(DisasContext
*s
)
1941 #ifdef TARGET_X86_64
1943 /* XXX: check 16 bit behaviour */
1944 gen_op_movq_A0_reg
[R_ESP
]();
1946 gen_op_st_T0_A0
[OT_QUAD
+ s
->mem_index
]();
1947 gen_op_movq_ESP_A0();
1951 gen_op_movl_A0_reg
[R_ESP
]();
1958 gen_op_movl_T1_A0();
1959 gen_op_addl_A0_SS();
1962 gen_op_andl_A0_ffff();
1963 gen_op_movl_T1_A0();
1964 gen_op_addl_A0_SS();
1966 gen_op_st_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
1967 if (s
->ss32
&& !s
->addseg
)
1968 gen_op_movl_ESP_A0();
1970 gen_op_mov_reg_T1
[s
->ss32
+ 1][R_ESP
]();
1974 /* generate a push. It depends on ss32, addseg and dflag */
1975 /* slower version for T1, only used for call Ev */
1976 static void gen_push_T1(DisasContext
*s
)
1978 #ifdef TARGET_X86_64
1980 /* XXX: check 16 bit behaviour */
1981 gen_op_movq_A0_reg
[R_ESP
]();
1983 gen_op_st_T1_A0
[OT_QUAD
+ s
->mem_index
]();
1984 gen_op_movq_ESP_A0();
1988 gen_op_movl_A0_reg
[R_ESP
]();
1995 gen_op_addl_A0_SS();
1998 gen_op_andl_A0_ffff();
1999 gen_op_addl_A0_SS();
2001 gen_op_st_T1_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2003 if (s
->ss32
&& !s
->addseg
)
2004 gen_op_movl_ESP_A0();
2006 gen_stack_update(s
, (-2) << s
->dflag
);
2010 /* two step pop is necessary for precise exceptions */
2011 static void gen_pop_T0(DisasContext
*s
)
2013 #ifdef TARGET_X86_64
2015 /* XXX: check 16 bit behaviour */
2016 gen_op_movq_A0_reg
[R_ESP
]();
2017 gen_op_ld_T0_A0
[OT_QUAD
+ s
->mem_index
]();
2021 gen_op_movl_A0_reg
[R_ESP
]();
2024 gen_op_addl_A0_SS();
2026 gen_op_andl_A0_ffff();
2027 gen_op_addl_A0_SS();
2029 gen_op_ld_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
/* Advance ESP/RSP past a just-popped value: 8 bytes in 64-bit code,
   otherwise 2 << dflag (2 or 4) per the operand-size flag.
   NOTE(review): the #else/#endif and any CODE64 condition lines are
   missing from this extraction. */
2033 static void gen_pop_update(DisasContext
*s
)
2035 #ifdef TARGET_X86_64
2037 gen_stack_update(s
, 8);
2041 gen_stack_update(s
, 2 << s
->dflag
);
2045 static void gen_stack_A0(DisasContext
*s
)
2047 gen_op_movl_A0_ESP();
2049 gen_op_andl_A0_ffff();
2050 gen_op_movl_T1_A0();
2052 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2055 /* NOTE: wrap around in 16 bit not fully handled */
2056 static void gen_pusha(DisasContext
*s
)
2059 gen_op_movl_A0_ESP();
2060 gen_op_addl_A0_im(-16 << s
->dflag
);
2062 gen_op_andl_A0_ffff();
2063 gen_op_movl_T1_A0();
2065 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2066 for(i
= 0;i
< 8; i
++) {
2067 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
2068 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2069 gen_op_addl_A0_im(2 << s
->dflag
);
2071 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
2074 /* NOTE: wrap around in 16 bit not fully handled */
2075 static void gen_popa(DisasContext
*s
)
2078 gen_op_movl_A0_ESP();
2080 gen_op_andl_A0_ffff();
2081 gen_op_movl_T1_A0();
2082 gen_op_addl_T1_im(16 << s
->dflag
);
2084 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2085 for(i
= 0;i
< 8; i
++) {
2086 /* ESP is not reloaded */
2088 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2089 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
2091 gen_op_addl_A0_im(2 << s
->dflag
);
2093 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
2096 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2100 ot
= s
->dflag
+ OT_WORD
;
2102 opsize
= 2 << s
->dflag
;
2104 gen_op_movl_A0_ESP();
2105 gen_op_addl_A0_im(-opsize
);
2107 gen_op_andl_A0_ffff();
2108 gen_op_movl_T1_A0();
2110 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2112 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2113 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2115 gen_op_enter_level(level
, s
->dflag
);
2117 gen_op_mov_reg_T1
[ot
][R_EBP
]();
2118 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2119 gen_op_mov_reg_T1
[ot
][R_ESP
]();
2122 static void gen_exception(DisasContext
*s
, int trapno
, target_ulong cur_eip
)
2124 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2125 gen_op_set_cc_op(s
->cc_op
);
2126 gen_jmp_im(cur_eip
);
2127 gen_op_raise_exception(trapno
);
2131 /* an interrupt is different from an exception because of the
2132 priviledge checks */
2133 static void gen_interrupt(DisasContext
*s
, int intno
,
2134 target_ulong cur_eip
, target_ulong next_eip
)
2136 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2137 gen_op_set_cc_op(s
->cc_op
);
2138 gen_jmp_im(cur_eip
);
2139 gen_op_raise_interrupt(intno
, (int)(next_eip
- cur_eip
));
2143 static void gen_debug(DisasContext
*s
, target_ulong cur_eip
)
2145 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2146 gen_op_set_cc_op(s
->cc_op
);
2147 gen_jmp_im(cur_eip
);
2152 /* generate a generic end of block. Trace exception is also generated
2154 static void gen_eob(DisasContext
*s
)
2156 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2157 gen_op_set_cc_op(s
->cc_op
);
2158 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
2159 gen_op_reset_inhibit_irq();
2161 if (s
->singlestep_enabled
) {
2164 gen_op_raise_exception(EXCP01_SSTP
);
2172 /* generate a jump to eip. No segment change must happen before as a
2173 direct call to the next block may occur */
2174 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
2176 TranslationBlock
*tb
= s
->tb
;
2179 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2180 gen_op_set_cc_op(s
->cc_op
);
2186 gen_op_movl_T0_im((long)tb
+ tb_num
);
2195 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2197 gen_jmp_tb(s
, eip
, 0);
2200 static void gen_movtl_T0_im(target_ulong val
)
2202 #ifdef TARGET_X86_64
2203 if ((int32_t)val
== val
) {
2204 gen_op_movl_T0_im(val
);
2206 gen_op_movq_T0_im64(val
>> 32, val
);
2209 gen_op_movl_T0_im(val
);
2213 static GenOpFunc1
*gen_ldq_env_A0
[3] = {
2214 gen_op_ldq_raw_env_A0
,
2215 #ifndef CONFIG_USER_ONLY
2216 gen_op_ldq_kernel_env_A0
,
2217 gen_op_ldq_user_env_A0
,
2221 static GenOpFunc1
*gen_stq_env_A0
[3] = {
2222 gen_op_stq_raw_env_A0
,
2223 #ifndef CONFIG_USER_ONLY
2224 gen_op_stq_kernel_env_A0
,
2225 gen_op_stq_user_env_A0
,
2229 static GenOpFunc1
*gen_ldo_env_A0
[3] = {
2230 gen_op_ldo_raw_env_A0
,
2231 #ifndef CONFIG_USER_ONLY
2232 gen_op_ldo_kernel_env_A0
,
2233 gen_op_ldo_user_env_A0
,
2237 static GenOpFunc1
*gen_sto_env_A0
[3] = {
2238 gen_op_sto_raw_env_A0
,
2239 #ifndef CONFIG_USER_ONLY
2240 gen_op_sto_kernel_env_A0
,
2241 gen_op_sto_user_env_A0
,
2245 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2247 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2248 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2249 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2251 static GenOpFunc2
*sse_op_table1
[256][4] = {
2252 /* pure SSE operations */
2253 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2254 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2255 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2256 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2257 [0x14] = { gen_op_punpckldq_xmm
, gen_op_punpcklqdq_xmm
},
2258 [0x15] = { gen_op_punpckhdq_xmm
, gen_op_punpckhqdq_xmm
},
2259 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2260 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2262 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2263 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2264 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2265 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2266 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2267 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2268 [0x2e] = { gen_op_ucomiss
, gen_op_ucomisd
},
2269 [0x2f] = { gen_op_comiss
, gen_op_comisd
},
2270 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2271 [0x51] = SSE_FOP(sqrt
),
2272 [0x52] = { gen_op_rsqrtps
, NULL
, gen_op_rsqrtss
, NULL
},
2273 [0x53] = { gen_op_rcpps
, NULL
, gen_op_rcpss
, NULL
},
2274 [0x54] = { gen_op_pand_xmm
, gen_op_pand_xmm
}, /* andps, andpd */
2275 [0x55] = { gen_op_pandn_xmm
, gen_op_pandn_xmm
}, /* andnps, andnpd */
2276 [0x56] = { gen_op_por_xmm
, gen_op_por_xmm
}, /* orps, orpd */
2277 [0x57] = { gen_op_pxor_xmm
, gen_op_pxor_xmm
}, /* xorps, xorpd */
2278 [0x58] = SSE_FOP(add
),
2279 [0x59] = SSE_FOP(mul
),
2280 [0x5a] = { gen_op_cvtps2pd
, gen_op_cvtpd2ps
,
2281 gen_op_cvtss2sd
, gen_op_cvtsd2ss
},
2282 [0x5b] = { gen_op_cvtdq2ps
, gen_op_cvtps2dq
, gen_op_cvttps2dq
},
2283 [0x5c] = SSE_FOP(sub
),
2284 [0x5d] = SSE_FOP(min
),
2285 [0x5e] = SSE_FOP(div
),
2286 [0x5f] = SSE_FOP(max
),
2288 [0xc2] = SSE_FOP(cmpeq
),
2289 [0xc6] = { (GenOpFunc2
*)gen_op_pshufd_xmm
, (GenOpFunc2
*)gen_op_shufpd
},
2291 /* MMX ops and their SSE extensions */
2292 [0x60] = MMX_OP2(punpcklbw
),
2293 [0x61] = MMX_OP2(punpcklwd
),
2294 [0x62] = MMX_OP2(punpckldq
),
2295 [0x63] = MMX_OP2(packsswb
),
2296 [0x64] = MMX_OP2(pcmpgtb
),
2297 [0x65] = MMX_OP2(pcmpgtw
),
2298 [0x66] = MMX_OP2(pcmpgtl
),
2299 [0x67] = MMX_OP2(packuswb
),
2300 [0x68] = MMX_OP2(punpckhbw
),
2301 [0x69] = MMX_OP2(punpckhwd
),
2302 [0x6a] = MMX_OP2(punpckhdq
),
2303 [0x6b] = MMX_OP2(packssdw
),
2304 [0x6c] = { NULL
, gen_op_punpcklqdq_xmm
},
2305 [0x6d] = { NULL
, gen_op_punpckhqdq_xmm
},
2306 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
2307 [0x6f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, , movqdu */
2308 [0x70] = { (GenOpFunc2
*)gen_op_pshufw_mmx
,
2309 (GenOpFunc2
*)gen_op_pshufd_xmm
,
2310 (GenOpFunc2
*)gen_op_pshufhw_xmm
,
2311 (GenOpFunc2
*)gen_op_pshuflw_xmm
},
2312 [0x71] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftw */
2313 [0x72] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftd */
2314 [0x73] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftq */
2315 [0x74] = MMX_OP2(pcmpeqb
),
2316 [0x75] = MMX_OP2(pcmpeqw
),
2317 [0x76] = MMX_OP2(pcmpeql
),
2318 [0x77] = { SSE_SPECIAL
}, /* emms */
2319 [0x7c] = { NULL
, gen_op_haddpd
, NULL
, gen_op_haddps
},
2320 [0x7d] = { NULL
, gen_op_hsubpd
, NULL
, gen_op_hsubps
},
2321 [0x7e] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movd, movd, , movq */
2322 [0x7f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, movdqu */
2323 [0xc4] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pinsrw */
2324 [0xc5] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pextrw */
2325 [0xd0] = { NULL
, gen_op_addsubpd
, NULL
, gen_op_addsubps
},
2326 [0xd1] = MMX_OP2(psrlw
),
2327 [0xd2] = MMX_OP2(psrld
),
2328 [0xd3] = MMX_OP2(psrlq
),
2329 [0xd4] = MMX_OP2(paddq
),
2330 [0xd5] = MMX_OP2(pmullw
),
2331 [0xd6] = { NULL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
},
2332 [0xd7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pmovmskb */
2333 [0xd8] = MMX_OP2(psubusb
),
2334 [0xd9] = MMX_OP2(psubusw
),
2335 [0xda] = MMX_OP2(pminub
),
2336 [0xdb] = MMX_OP2(pand
),
2337 [0xdc] = MMX_OP2(paddusb
),
2338 [0xdd] = MMX_OP2(paddusw
),
2339 [0xde] = MMX_OP2(pmaxub
),
2340 [0xdf] = MMX_OP2(pandn
),
2341 [0xe0] = MMX_OP2(pavgb
),
2342 [0xe1] = MMX_OP2(psraw
),
2343 [0xe2] = MMX_OP2(psrad
),
2344 [0xe3] = MMX_OP2(pavgw
),
2345 [0xe4] = MMX_OP2(pmulhuw
),
2346 [0xe5] = MMX_OP2(pmulhw
),
2347 [0xe6] = { NULL
, gen_op_cvttpd2dq
, gen_op_cvtdq2pd
, gen_op_cvtpd2dq
},
2348 [0xe7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntq, movntq */
2349 [0xe8] = MMX_OP2(psubsb
),
2350 [0xe9] = MMX_OP2(psubsw
),
2351 [0xea] = MMX_OP2(pminsw
),
2352 [0xeb] = MMX_OP2(por
),
2353 [0xec] = MMX_OP2(paddsb
),
2354 [0xed] = MMX_OP2(paddsw
),
2355 [0xee] = MMX_OP2(pmaxsw
),
2356 [0xef] = MMX_OP2(pxor
),
2357 [0xf0] = { NULL
, NULL
, NULL
, SSE_SPECIAL
}, /* lddqu (PNI) */
2358 [0xf1] = MMX_OP2(psllw
),
2359 [0xf2] = MMX_OP2(pslld
),
2360 [0xf3] = MMX_OP2(psllq
),
2361 [0xf4] = MMX_OP2(pmuludq
),
2362 [0xf5] = MMX_OP2(pmaddwd
),
2363 [0xf6] = MMX_OP2(psadbw
),
2364 [0xf7] = MMX_OP2(maskmov
),
2365 [0xf8] = MMX_OP2(psubb
),
2366 [0xf9] = MMX_OP2(psubw
),
2367 [0xfa] = MMX_OP2(psubl
),
2368 [0xfb] = MMX_OP2(psubq
),
2369 [0xfc] = MMX_OP2(paddb
),
2370 [0xfd] = MMX_OP2(paddw
),
2371 [0xfe] = MMX_OP2(paddl
),
2374 static GenOpFunc2
*sse_op_table2
[3 * 8][2] = {
2375 [0 + 2] = MMX_OP2(psrlw
),
2376 [0 + 4] = MMX_OP2(psraw
),
2377 [0 + 6] = MMX_OP2(psllw
),
2378 [8 + 2] = MMX_OP2(psrld
),
2379 [8 + 4] = MMX_OP2(psrad
),
2380 [8 + 6] = MMX_OP2(pslld
),
2381 [16 + 2] = MMX_OP2(psrlq
),
2382 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2383 [16 + 6] = MMX_OP2(psllq
),
2384 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
2387 static GenOpFunc1
*sse_op_table3
[4 * 3] = {
2390 X86_64_ONLY(gen_op_cvtsq2ss
),
2391 X86_64_ONLY(gen_op_cvtsq2sd
),
2395 X86_64_ONLY(gen_op_cvttss2sq
),
2396 X86_64_ONLY(gen_op_cvttsd2sq
),
2400 X86_64_ONLY(gen_op_cvtss2sq
),
2401 X86_64_ONLY(gen_op_cvtsd2sq
),
2404 static GenOpFunc2
*sse_op_table4
[8][4] = {
2415 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2417 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2418 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2419 GenOpFunc2
*sse_op2
;
2420 GenOpFunc3
*sse_op3
;
2423 if (s
->prefix
& PREFIX_DATA
)
2425 else if (s
->prefix
& PREFIX_REPZ
)
2427 else if (s
->prefix
& PREFIX_REPNZ
)
2431 sse_op2
= sse_op_table1
[b
][b1
];
2434 if (b
<= 0x5f || b
== 0xc6 || b
== 0xc2) {
2444 /* simple MMX/SSE operation */
2445 if (s
->flags
& HF_TS_MASK
) {
2446 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2449 if (s
->flags
& HF_EM_MASK
) {
2451 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2454 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2461 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2462 the static cpu state) */
2467 modrm
= ldub_code(s
->pc
++);
2468 reg
= ((modrm
>> 3) & 7);
2471 mod
= (modrm
>> 6) & 3;
2472 if (sse_op2
== SSE_SPECIAL
) {
2475 case 0x0e7: /* movntq */
2478 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2479 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2481 case 0x1e7: /* movntdq */
2482 case 0x02b: /* movntps */
2483 case 0x12b: /* movntps */
2484 case 0x2f0: /* lddqu */
2487 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2488 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2490 case 0x6e: /* movd mm, ea */
2491 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2492 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2494 case 0x16e: /* movd xmm, ea */
2495 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2496 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2498 case 0x6f: /* movq mm, ea */
2500 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2501 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2504 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
].mmx
),
2505 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2508 case 0x010: /* movups */
2509 case 0x110: /* movupd */
2510 case 0x028: /* movaps */
2511 case 0x128: /* movapd */
2512 case 0x16f: /* movdqa xmm, ea */
2513 case 0x26f: /* movdqu xmm, ea */
2515 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2516 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2518 rm
= (modrm
& 7) | REX_B(s
);
2519 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
2520 offsetof(CPUX86State
,xmm_regs
[rm
]));
2523 case 0x210: /* movss xmm, ea */
2525 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2526 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2527 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2529 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2530 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2531 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2533 rm
= (modrm
& 7) | REX_B(s
);
2534 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2535 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2538 case 0x310: /* movsd xmm, ea */
2540 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2541 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2543 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2544 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2546 rm
= (modrm
& 7) | REX_B(s
);
2547 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2548 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2551 case 0x012: /* movlps */
2552 case 0x112: /* movlpd */
2554 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2555 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2558 rm
= (modrm
& 7) | REX_B(s
);
2559 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2560 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2563 case 0x016: /* movhps */
2564 case 0x116: /* movhpd */
2566 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2567 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2570 rm
= (modrm
& 7) | REX_B(s
);
2571 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2572 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2575 case 0x216: /* movshdup */
2577 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2578 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2580 rm
= (modrm
& 7) | REX_B(s
);
2581 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2582 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
2583 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2584 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
2586 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2587 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2588 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2589 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2591 case 0x7e: /* movd ea, mm */
2592 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2593 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2595 case 0x17e: /* movd ea, xmm */
2596 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2597 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2599 case 0x27e: /* movq xmm, ea */
2601 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2602 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2604 rm
= (modrm
& 7) | REX_B(s
);
2605 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2606 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2608 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2610 case 0x7f: /* movq ea, mm */
2612 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2613 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2616 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2617 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2620 case 0x011: /* movups */
2621 case 0x111: /* movupd */
2622 case 0x029: /* movaps */
2623 case 0x129: /* movapd */
2624 case 0x17f: /* movdqa ea, xmm */
2625 case 0x27f: /* movdqu ea, xmm */
2627 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2628 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2630 rm
= (modrm
& 7) | REX_B(s
);
2631 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2632 offsetof(CPUX86State
,xmm_regs
[reg
]));
2635 case 0x211: /* movss ea, xmm */
2637 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2638 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2639 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
2641 rm
= (modrm
& 7) | REX_B(s
);
2642 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2643 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2646 case 0x311: /* movsd ea, xmm */
2648 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2649 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2651 rm
= (modrm
& 7) | REX_B(s
);
2652 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2653 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2656 case 0x013: /* movlps */
2657 case 0x113: /* movlpd */
2659 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2660 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2665 case 0x017: /* movhps */
2666 case 0x117: /* movhpd */
2668 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2669 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2674 case 0x71: /* shift mm, im */
2677 case 0x171: /* shift xmm, im */
2680 val
= ldub_code(s
->pc
++);
2682 gen_op_movl_T0_im(val
);
2683 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2685 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2686 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2688 gen_op_movl_T0_im(val
);
2689 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2691 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2692 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2694 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2698 rm
= (modrm
& 7) | REX_B(s
);
2699 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2702 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2704 sse_op2(op2_offset
, op1_offset
);
2706 case 0x050: /* movmskps */
2707 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[reg
]));
2708 rm
= (modrm
& 7) | REX_B(s
);
2709 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
2711 case 0x150: /* movmskpd */
2712 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[reg
]));
2713 rm
= (modrm
& 7) | REX_B(s
);
2714 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
2716 case 0x02a: /* cvtpi2ps */
2717 case 0x12a: /* cvtpi2pd */
2720 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2721 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2722 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2725 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2727 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2730 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
2734 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
2738 case 0x22a: /* cvtsi2ss */
2739 case 0x32a: /* cvtsi2sd */
2740 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2741 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2742 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2743 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
2745 case 0x02c: /* cvttps2pi */
2746 case 0x12c: /* cvttpd2pi */
2747 case 0x02d: /* cvtps2pi */
2748 case 0x12d: /* cvtpd2pi */
2751 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2752 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2753 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2755 rm
= (modrm
& 7) | REX_B(s
);
2756 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2758 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
2761 gen_op_cvttps2pi(op1_offset
, op2_offset
);
2764 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
2767 gen_op_cvtps2pi(op1_offset
, op2_offset
);
2770 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
2774 case 0x22c: /* cvttss2si */
2775 case 0x32c: /* cvttsd2si */
2776 case 0x22d: /* cvtss2si */
2777 case 0x32d: /* cvtsd2si */
2778 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2779 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2780 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
2781 (b
& 1) * 4](op1_offset
);
2782 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2784 case 0xc4: /* pinsrw */
2786 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2787 val
= ldub_code(s
->pc
++);
2790 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
2793 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
2796 case 0xc5: /* pextrw */
2800 val
= ldub_code(s
->pc
++);
2803 rm
= (modrm
& 7) | REX_B(s
);
2804 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
2808 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
2810 reg
= ((modrm
>> 3) & 7) | rex_r
;
2811 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2813 case 0x1d6: /* movq ea, xmm */
2815 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2816 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2818 rm
= (modrm
& 7) | REX_B(s
);
2819 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2820 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2821 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2824 case 0x2d6: /* movq2dq */
2826 rm
= (modrm
& 7) | REX_B(s
);
2827 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2828 offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
));
2829 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2831 case 0x3d6: /* movdq2q */
2834 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2835 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2837 case 0xd7: /* pmovmskb */
2842 rm
= (modrm
& 7) | REX_B(s
);
2843 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
2846 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2848 reg
= ((modrm
>> 3) & 7) | rex_r
;
2849 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2855 /* generic MMX or SSE operation */
2857 /* maskmov : we must prepare A0 */
2860 #ifdef TARGET_X86_64
2862 gen_op_movq_A0_reg
[R_EDI
]();
2866 gen_op_movl_A0_reg
[R_EDI
]();
2868 gen_op_andl_A0_ffff();
2870 gen_add_A0_ds_seg(s
);
2873 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2875 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2876 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2877 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f) ||
2879 /* specific case for SSE single instructions */
2882 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2883 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2886 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
2889 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2892 rm
= (modrm
& 7) | REX_B(s
);
2893 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2896 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
2898 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2899 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2900 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2903 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2907 case 0x70: /* pshufx insn */
2908 case 0xc6: /* pshufx insn */
2909 val
= ldub_code(s
->pc
++);
2910 sse_op3
= (GenOpFunc3
*)sse_op2
;
2911 sse_op3(op1_offset
, op2_offset
, val
);
2915 val
= ldub_code(s
->pc
++);
2918 sse_op2
= sse_op_table4
[val
][b1
];
2919 sse_op2(op1_offset
, op2_offset
);
2922 sse_op2(op1_offset
, op2_offset
);
2925 if (b
== 0x2e || b
== 0x2f) {
2926 s
->cc_op
= CC_OP_EFLAGS
;
2932 /* convert one instruction. s->is_jmp is set if the translation must
2933 be stopped. Return the next pc value */
2934 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
2936 int b
, prefixes
, aflag
, dflag
;
2938 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
2939 target_ulong next_eip
, tval
;
2949 #ifdef TARGET_X86_64
2954 s
->rip_offset
= 0; /* for relative ip address */
2956 b
= ldub_code(s
->pc
);
2958 /* check prefixes */
2959 #ifdef TARGET_X86_64
2963 prefixes
|= PREFIX_REPZ
;
2966 prefixes
|= PREFIX_REPNZ
;
2969 prefixes
|= PREFIX_LOCK
;
2990 prefixes
|= PREFIX_DATA
;
2993 prefixes
|= PREFIX_ADR
;
2997 rex_w
= (b
>> 3) & 1;
2998 rex_r
= (b
& 0x4) << 1;
2999 s
->rex_x
= (b
& 0x2) << 2;
3000 REX_B(s
) = (b
& 0x1) << 3;
3001 x86_64_hregs
= 1; /* select uniform byte register addressing */
3005 /* 0x66 is ignored if rex.w is set */
3008 if (prefixes
& PREFIX_DATA
)
3011 if (!(prefixes
& PREFIX_ADR
))
3018 prefixes
|= PREFIX_REPZ
;
3021 prefixes
|= PREFIX_REPNZ
;
3024 prefixes
|= PREFIX_LOCK
;
3045 prefixes
|= PREFIX_DATA
;
3048 prefixes
|= PREFIX_ADR
;
3051 if (prefixes
& PREFIX_DATA
)
3053 if (prefixes
& PREFIX_ADR
)
3057 s
->prefix
= prefixes
;
3061 /* lock generation */
3062 if (prefixes
& PREFIX_LOCK
)
3065 /* now check op code */
3069 /**************************/
3070 /* extended op code */
3071 b
= ldub_code(s
->pc
++) | 0x100;
3074 /**************************/
3092 ot
= dflag
+ OT_WORD
;
3095 case 0: /* OP Ev, Gv */
3096 modrm
= ldub_code(s
->pc
++);
3097 reg
= ((modrm
>> 3) & 7) | rex_r
;
3098 mod
= (modrm
>> 6) & 3;
3099 rm
= (modrm
& 7) | REX_B(s
);
3101 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3103 } else if (op
== OP_XORL
&& rm
== reg
) {
3105 /* xor reg, reg optimisation */
3107 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3108 gen_op_mov_reg_T0
[ot
][reg
]();
3109 gen_op_update1_cc();
3114 gen_op_mov_TN_reg
[ot
][1][reg
]();
3115 gen_op(s
, op
, ot
, opreg
);
3117 case 1: /* OP Gv, Ev */
3118 modrm
= ldub_code(s
->pc
++);
3119 mod
= (modrm
>> 6) & 3;
3120 reg
= ((modrm
>> 3) & 7) | rex_r
;
3121 rm
= (modrm
& 7) | REX_B(s
);
3123 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3124 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3125 } else if (op
== OP_XORL
&& rm
== reg
) {
3128 gen_op_mov_TN_reg
[ot
][1][rm
]();
3130 gen_op(s
, op
, ot
, reg
);
3132 case 2: /* OP A, Iv */
3133 val
= insn_get(s
, ot
);
3134 gen_op_movl_T1_im(val
);
3135 gen_op(s
, op
, ot
, OR_EAX
);
3141 case 0x80: /* GRP1 */
3151 ot
= dflag
+ OT_WORD
;
3153 modrm
= ldub_code(s
->pc
++);
3154 mod
= (modrm
>> 6) & 3;
3155 rm
= (modrm
& 7) | REX_B(s
);
3156 op
= (modrm
>> 3) & 7;
3162 s
->rip_offset
= insn_const_size(ot
);
3163 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3174 val
= insn_get(s
, ot
);
3177 val
= (int8_t)insn_get(s
, OT_BYTE
);
3180 gen_op_movl_T1_im(val
);
3181 gen_op(s
, op
, ot
, opreg
);
3185 /**************************/
3186 /* inc, dec, and other misc arith */
3187 case 0x40 ... 0x47: /* inc Gv */
3188 ot
= dflag
? OT_LONG
: OT_WORD
;
3189 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3191 case 0x48 ... 0x4f: /* dec Gv */
3192 ot
= dflag
? OT_LONG
: OT_WORD
;
3193 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3195 case 0xf6: /* GRP3 */
3200 ot
= dflag
+ OT_WORD
;
3202 modrm
= ldub_code(s
->pc
++);
3203 mod
= (modrm
>> 6) & 3;
3204 rm
= (modrm
& 7) | REX_B(s
);
3205 op
= (modrm
>> 3) & 7;
3208 s
->rip_offset
= insn_const_size(ot
);
3209 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3210 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3212 gen_op_mov_TN_reg
[ot
][0][rm
]();
3217 val
= insn_get(s
, ot
);
3218 gen_op_movl_T1_im(val
);
3219 gen_op_testl_T0_T1_cc();
3220 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3225 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3227 gen_op_mov_reg_T0
[ot
][rm
]();
3233 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3235 gen_op_mov_reg_T0
[ot
][rm
]();
3237 gen_op_update_neg_cc();
3238 s
->cc_op
= CC_OP_SUBB
+ ot
;
3243 gen_op_mulb_AL_T0();
3244 s
->cc_op
= CC_OP_MULB
;
3247 gen_op_mulw_AX_T0();
3248 s
->cc_op
= CC_OP_MULW
;
3252 gen_op_mull_EAX_T0();
3253 s
->cc_op
= CC_OP_MULL
;
3255 #ifdef TARGET_X86_64
3257 gen_op_mulq_EAX_T0();
3258 s
->cc_op
= CC_OP_MULQ
;
3266 gen_op_imulb_AL_T0();
3267 s
->cc_op
= CC_OP_MULB
;
3270 gen_op_imulw_AX_T0();
3271 s
->cc_op
= CC_OP_MULW
;
3275 gen_op_imull_EAX_T0();
3276 s
->cc_op
= CC_OP_MULL
;
3278 #ifdef TARGET_X86_64
3280 gen_op_imulq_EAX_T0();
3281 s
->cc_op
= CC_OP_MULQ
;
3289 gen_jmp_im(pc_start
- s
->cs_base
);
3290 gen_op_divb_AL_T0();
3293 gen_jmp_im(pc_start
- s
->cs_base
);
3294 gen_op_divw_AX_T0();
3298 gen_jmp_im(pc_start
- s
->cs_base
);
3299 gen_op_divl_EAX_T0();
3301 #ifdef TARGET_X86_64
3303 gen_jmp_im(pc_start
- s
->cs_base
);
3304 gen_op_divq_EAX_T0();
3312 gen_jmp_im(pc_start
- s
->cs_base
);
3313 gen_op_idivb_AL_T0();
3316 gen_jmp_im(pc_start
- s
->cs_base
);
3317 gen_op_idivw_AX_T0();
3321 gen_jmp_im(pc_start
- s
->cs_base
);
3322 gen_op_idivl_EAX_T0();
3324 #ifdef TARGET_X86_64
3326 gen_jmp_im(pc_start
- s
->cs_base
);
3327 gen_op_idivq_EAX_T0();
3337 case 0xfe: /* GRP4 */
3338 case 0xff: /* GRP5 */
3342 ot
= dflag
+ OT_WORD
;
3344 modrm
= ldub_code(s
->pc
++);
3345 mod
= (modrm
>> 6) & 3;
3346 rm
= (modrm
& 7) | REX_B(s
);
3347 op
= (modrm
>> 3) & 7;
3348 if (op
>= 2 && b
== 0xfe) {
3352 if (op
>= 2 && op
<= 5) {
3353 /* operand size for jumps is 64 bit */
3355 } else if (op
== 6) {
3356 /* default push size is 64 bit */
3357 ot
= dflag
? OT_QUAD
: OT_WORD
;
3361 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3362 if (op
>= 2 && op
!= 3 && op
!= 5)
3363 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3365 gen_op_mov_TN_reg
[ot
][0][rm
]();
3369 case 0: /* inc Ev */
3374 gen_inc(s
, ot
, opreg
, 1);
3376 case 1: /* dec Ev */
3381 gen_inc(s
, ot
, opreg
, -1);
3383 case 2: /* call Ev */
3384 /* XXX: optimize if memory (no 'and' is necessary) */
3386 gen_op_andl_T0_ffff();
3387 next_eip
= s
->pc
- s
->cs_base
;
3388 gen_op_movl_T1_im(next_eip
);
3393 case 3: /* lcall Ev */
3394 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3395 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
3396 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3398 if (s
->pe
&& !s
->vm86
) {
3399 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3400 gen_op_set_cc_op(s
->cc_op
);
3401 gen_jmp_im(pc_start
- s
->cs_base
);
3402 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3404 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3408 case 4: /* jmp Ev */
3410 gen_op_andl_T0_ffff();
3414 case 5: /* ljmp Ev */
3415 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3416 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
3417 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3419 if (s
->pe
&& !s
->vm86
) {
3420 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3421 gen_op_set_cc_op(s
->cc_op
);
3422 gen_jmp_im(pc_start
- s
->cs_base
);
3423 gen_op_ljmp_protected_T0_T1(s
->pc
- s
->cs_base
);
3425 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3426 gen_op_movl_T0_T1();
3431 case 6: /* push Ev */
3439 case 0x84: /* test Ev, Gv */
3444 ot
= dflag
+ OT_WORD
;
3446 modrm
= ldub_code(s
->pc
++);
3447 mod
= (modrm
>> 6) & 3;
3448 rm
= (modrm
& 7) | REX_B(s
);
3449 reg
= ((modrm
>> 3) & 7) | rex_r
;
3451 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3452 gen_op_mov_TN_reg
[ot
][1][reg
]();
3453 gen_op_testl_T0_T1_cc();
3454 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3457 case 0xa8: /* test eAX, Iv */
3462 ot
= dflag
+ OT_WORD
;
3463 val
= insn_get(s
, ot
);
3465 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
3466 gen_op_movl_T1_im(val
);
3467 gen_op_testl_T0_T1_cc();
3468 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3471 case 0x98: /* CWDE/CBW */
3472 #ifdef TARGET_X86_64
3474 gen_op_movslq_RAX_EAX();
3478 gen_op_movswl_EAX_AX();
3480 gen_op_movsbw_AX_AL();
3482 case 0x99: /* CDQ/CWD */
3483 #ifdef TARGET_X86_64
3485 gen_op_movsqo_RDX_RAX();
3489 gen_op_movslq_EDX_EAX();
3491 gen_op_movswl_DX_AX();
3493 case 0x1af: /* imul Gv, Ev */
3494 case 0x69: /* imul Gv, Ev, I */
3496 ot
= dflag
+ OT_WORD
;
3497 modrm
= ldub_code(s
->pc
++);
3498 reg
= ((modrm
>> 3) & 7) | rex_r
;
3500 s
->rip_offset
= insn_const_size(ot
);
3503 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3505 val
= insn_get(s
, ot
);
3506 gen_op_movl_T1_im(val
);
3507 } else if (b
== 0x6b) {
3508 val
= (int8_t)insn_get(s
, OT_BYTE
);
3509 gen_op_movl_T1_im(val
);
3511 gen_op_mov_TN_reg
[ot
][1][reg
]();
3514 #ifdef TARGET_X86_64
3515 if (ot
== OT_QUAD
) {
3516 gen_op_imulq_T0_T1();
3519 if (ot
== OT_LONG
) {
3520 gen_op_imull_T0_T1();
3522 gen_op_imulw_T0_T1();
3524 gen_op_mov_reg_T0
[ot
][reg
]();
3525 s
->cc_op
= CC_OP_MULB
+ ot
;
3528 case 0x1c1: /* xadd Ev, Gv */
3532 ot
= dflag
+ OT_WORD
;
3533 modrm
= ldub_code(s
->pc
++);
3534 reg
= ((modrm
>> 3) & 7) | rex_r
;
3535 mod
= (modrm
>> 6) & 3;
3537 rm
= (modrm
& 7) | REX_B(s
);
3538 gen_op_mov_TN_reg
[ot
][0][reg
]();
3539 gen_op_mov_TN_reg
[ot
][1][rm
]();
3540 gen_op_addl_T0_T1();
3541 gen_op_mov_reg_T1
[ot
][reg
]();
3542 gen_op_mov_reg_T0
[ot
][rm
]();
3544 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3545 gen_op_mov_TN_reg
[ot
][0][reg
]();
3546 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3547 gen_op_addl_T0_T1();
3548 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3549 gen_op_mov_reg_T1
[ot
][reg
]();
3551 gen_op_update2_cc();
3552 s
->cc_op
= CC_OP_ADDB
+ ot
;
3555 case 0x1b1: /* cmpxchg Ev, Gv */
3559 ot
= dflag
+ OT_WORD
;
3560 modrm
= ldub_code(s
->pc
++);
3561 reg
= ((modrm
>> 3) & 7) | rex_r
;
3562 mod
= (modrm
>> 6) & 3;
3563 gen_op_mov_TN_reg
[ot
][1][reg
]();
3565 rm
= (modrm
& 7) | REX_B(s
);
3566 gen_op_mov_TN_reg
[ot
][0][rm
]();
3567 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3568 gen_op_mov_reg_T0
[ot
][rm
]();
3570 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3571 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3572 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3574 s
->cc_op
= CC_OP_SUBB
+ ot
;
3576 case 0x1c7: /* cmpxchg8b */
3577 modrm
= ldub_code(s
->pc
++);
3578 mod
= (modrm
>> 6) & 3;
3581 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3582 gen_op_set_cc_op(s
->cc_op
);
3583 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3585 s
->cc_op
= CC_OP_EFLAGS
;
3588 /**************************/
3590 case 0x50 ... 0x57: /* push */
3591 gen_op_mov_TN_reg
[OT_LONG
][0][(b
& 7) | REX_B(s
)]();
3594 case 0x58 ... 0x5f: /* pop */
3596 ot
= dflag
? OT_QUAD
: OT_WORD
;
3598 ot
= dflag
+ OT_WORD
;
3601 /* NOTE: order is important for pop %sp */
3603 gen_op_mov_reg_T0
[ot
][(b
& 7) | REX_B(s
)]();
3605 case 0x60: /* pusha */
3610 case 0x61: /* popa */
3615 case 0x68: /* push Iv */
3618 ot
= dflag
? OT_QUAD
: OT_WORD
;
3620 ot
= dflag
+ OT_WORD
;
3623 val
= insn_get(s
, ot
);
3625 val
= (int8_t)insn_get(s
, OT_BYTE
);
3626 gen_op_movl_T0_im(val
);
3629 case 0x8f: /* pop Ev */
3631 ot
= dflag
? OT_QUAD
: OT_WORD
;
3633 ot
= dflag
+ OT_WORD
;
3635 modrm
= ldub_code(s
->pc
++);
3636 mod
= (modrm
>> 6) & 3;
3639 /* NOTE: order is important for pop %sp */
3641 rm
= (modrm
& 7) | REX_B(s
);
3642 gen_op_mov_reg_T0
[ot
][rm
]();
3644 /* NOTE: order is important too for MMU exceptions */
3645 s
->popl_esp_hack
= 1 << ot
;
3646 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3647 s
->popl_esp_hack
= 0;
3651 case 0xc8: /* enter */
3653 /* XXX: long mode support */
3655 val
= lduw_code(s
->pc
);
3657 level
= ldub_code(s
->pc
++);
3658 gen_enter(s
, val
, level
);
3661 case 0xc9: /* leave */
3662 /* XXX: exception not precise (ESP is updated before potential exception) */
3663 /* XXX: may be invalid for 16 bit in long mode */
3665 gen_op_mov_TN_reg
[OT_QUAD
][0][R_EBP
]();
3666 gen_op_mov_reg_T0
[OT_QUAD
][R_ESP
]();
3667 } else if (s
->ss32
) {
3668 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
3669 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
3671 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
3672 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
3676 ot
= dflag
? OT_QUAD
: OT_WORD
;
3678 ot
= dflag
+ OT_WORD
;
3680 gen_op_mov_reg_T0
[ot
][R_EBP
]();
3683 case 0x06: /* push es */
3684 case 0x0e: /* push cs */
3685 case 0x16: /* push ss */
3686 case 0x1e: /* push ds */
3689 gen_op_movl_T0_seg(b
>> 3);
3692 case 0x1a0: /* push fs */
3693 case 0x1a8: /* push gs */
3694 gen_op_movl_T0_seg((b
>> 3) & 7);
3697 case 0x07: /* pop es */
3698 case 0x17: /* pop ss */
3699 case 0x1f: /* pop ds */
3704 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3707 /* if reg == SS, inhibit interrupts/trace. */
3708 /* If several instructions disable interrupts, only the
3710 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3711 gen_op_set_inhibit_irq();
3715 gen_jmp_im(s
->pc
- s
->cs_base
);
3719 case 0x1a1: /* pop fs */
3720 case 0x1a9: /* pop gs */
3722 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
3725 gen_jmp_im(s
->pc
- s
->cs_base
);
3730 /**************************/
3733 case 0x89: /* mov Gv, Ev */
3737 ot
= dflag
+ OT_WORD
;
3738 modrm
= ldub_code(s
->pc
++);
3739 reg
= ((modrm
>> 3) & 7) | rex_r
;
3741 /* generate a generic store */
3742 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
3745 case 0xc7: /* mov Ev, Iv */
3749 ot
= dflag
+ OT_WORD
;
3750 modrm
= ldub_code(s
->pc
++);
3751 mod
= (modrm
>> 6) & 3;
3753 s
->rip_offset
= insn_const_size(ot
);
3754 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3756 val
= insn_get(s
, ot
);
3757 gen_op_movl_T0_im(val
);
3759 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3761 gen_op_mov_reg_T0
[ot
][(modrm
& 7) | REX_B(s
)]();
3764 case 0x8b: /* mov Ev, Gv */
3768 ot
= OT_WORD
+ dflag
;
3769 modrm
= ldub_code(s
->pc
++);
3770 reg
= ((modrm
>> 3) & 7) | rex_r
;
3772 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3773 gen_op_mov_reg_T0
[ot
][reg
]();
3775 case 0x8e: /* mov seg, Gv */
3776 modrm
= ldub_code(s
->pc
++);
3777 reg
= (modrm
>> 3) & 7;
3778 if (reg
>= 6 || reg
== R_CS
)
3780 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3781 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3783 /* if reg == SS, inhibit interrupts/trace */
3784 /* If several instructions disable interrupts, only the
3786 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3787 gen_op_set_inhibit_irq();
3791 gen_jmp_im(s
->pc
- s
->cs_base
);
3795 case 0x8c: /* mov Gv, seg */
3796 modrm
= ldub_code(s
->pc
++);
3797 reg
= (modrm
>> 3) & 7;
3798 mod
= (modrm
>> 6) & 3;
3801 gen_op_movl_T0_seg(reg
);
3803 ot
= OT_WORD
+ dflag
;
3806 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3809 case 0x1b6: /* movzbS Gv, Eb */
3810 case 0x1b7: /* movzwS Gv, Eb */
3811 case 0x1be: /* movsbS Gv, Eb */
3812 case 0x1bf: /* movswS Gv, Eb */
3815 /* d_ot is the size of destination */
3816 d_ot
= dflag
+ OT_WORD
;
3817 /* ot is the size of source */
3818 ot
= (b
& 1) + OT_BYTE
;
3819 modrm
= ldub_code(s
->pc
++);
3820 reg
= ((modrm
>> 3) & 7) | rex_r
;
3821 mod
= (modrm
>> 6) & 3;
3822 rm
= (modrm
& 7) | REX_B(s
);
3825 gen_op_mov_TN_reg
[ot
][0][rm
]();
3826 switch(ot
| (b
& 8)) {
3828 gen_op_movzbl_T0_T0();
3831 gen_op_movsbl_T0_T0();
3834 gen_op_movzwl_T0_T0();
3838 gen_op_movswl_T0_T0();
3841 gen_op_mov_reg_T0
[d_ot
][reg
]();
3843 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3845 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
3847 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
3849 gen_op_mov_reg_T0
[d_ot
][reg
]();
3854 case 0x8d: /* lea */
3855 ot
= dflag
+ OT_WORD
;
3856 modrm
= ldub_code(s
->pc
++);
3857 mod
= (modrm
>> 6) & 3;
3860 reg
= ((modrm
>> 3) & 7) | rex_r
;
3861 /* we must ensure that no segment is added */
3865 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3867 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
3870 case 0xa0: /* mov EAX, Ov */
3872 case 0xa2: /* mov Ov, EAX */
3875 target_ulong offset_addr
;
3880 ot
= dflag
+ OT_WORD
;
3881 #ifdef TARGET_X86_64
3883 offset_addr
= ldq_code(s
->pc
);
3885 if (offset_addr
== (int32_t)offset_addr
)
3886 gen_op_movq_A0_im(offset_addr
);
3888 gen_op_movq_A0_im64(offset_addr
>> 32, offset_addr
);
3893 offset_addr
= insn_get(s
, OT_LONG
);
3895 offset_addr
= insn_get(s
, OT_WORD
);
3897 gen_op_movl_A0_im(offset_addr
);
3899 gen_add_A0_ds_seg(s
);
3901 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3902 gen_op_mov_reg_T0
[ot
][R_EAX
]();
3904 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
3905 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3909 case 0xd7: /* xlat */
3910 #ifdef TARGET_X86_64
3912 gen_op_movq_A0_reg
[R_EBX
]();
3913 gen_op_addq_A0_AL();
3917 gen_op_movl_A0_reg
[R_EBX
]();
3918 gen_op_addl_A0_AL();
3920 gen_op_andl_A0_ffff();
3922 gen_add_A0_ds_seg(s
);
3923 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
3924 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
3926 case 0xb0 ... 0xb7: /* mov R, Ib */
3927 val
= insn_get(s
, OT_BYTE
);
3928 gen_op_movl_T0_im(val
);
3929 gen_op_mov_reg_T0
[OT_BYTE
][(b
& 7) | REX_B(s
)]();
3931 case 0xb8 ... 0xbf: /* mov R, Iv */
3932 #ifdef TARGET_X86_64
3936 tmp
= ldq_code(s
->pc
);
3938 reg
= (b
& 7) | REX_B(s
);
3939 gen_movtl_T0_im(tmp
);
3940 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
3944 ot
= dflag
? OT_LONG
: OT_WORD
;
3945 val
= insn_get(s
, ot
);
3946 reg
= (b
& 7) | REX_B(s
);
3947 gen_op_movl_T0_im(val
);
3948 gen_op_mov_reg_T0
[ot
][reg
]();
3952 case 0x91 ... 0x97: /* xchg R, EAX */
3953 ot
= dflag
+ OT_WORD
;
3954 reg
= (b
& 7) | REX_B(s
);
3958 case 0x87: /* xchg Ev, Gv */
3962 ot
= dflag
+ OT_WORD
;
3963 modrm
= ldub_code(s
->pc
++);
3964 reg
= ((modrm
>> 3) & 7) | rex_r
;
3965 mod
= (modrm
>> 6) & 3;
3967 rm
= (modrm
& 7) | REX_B(s
);
3969 gen_op_mov_TN_reg
[ot
][0][reg
]();
3970 gen_op_mov_TN_reg
[ot
][1][rm
]();
3971 gen_op_mov_reg_T0
[ot
][rm
]();
3972 gen_op_mov_reg_T1
[ot
][reg
]();
3974 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3975 gen_op_mov_TN_reg
[ot
][0][reg
]();
3976 /* for xchg, lock is implicit */
3977 if (!(prefixes
& PREFIX_LOCK
))
3979 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3980 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3981 if (!(prefixes
& PREFIX_LOCK
))
3983 gen_op_mov_reg_T1
[ot
][reg
]();
3986 case 0xc4: /* les Gv */
3991 case 0xc5: /* lds Gv */
3996 case 0x1b2: /* lss Gv */
3999 case 0x1b4: /* lfs Gv */
4002 case 0x1b5: /* lgs Gv */
4005 ot
= dflag
? OT_LONG
: OT_WORD
;
4006 modrm
= ldub_code(s
->pc
++);
4007 reg
= ((modrm
>> 3) & 7) | rex_r
;
4008 mod
= (modrm
>> 6) & 3;
4011 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4012 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4013 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
4014 /* load the segment first to handle exceptions properly */
4015 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
4016 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4017 /* then put the data */
4018 gen_op_mov_reg_T1
[ot
][reg
]();
4020 gen_jmp_im(s
->pc
- s
->cs_base
);
4025 /************************/
4036 ot
= dflag
+ OT_WORD
;
4038 modrm
= ldub_code(s
->pc
++);
4039 mod
= (modrm
>> 6) & 3;
4040 op
= (modrm
>> 3) & 7;
4046 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4049 opreg
= (modrm
& 7) | REX_B(s
);
4054 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4057 shift
= ldub_code(s
->pc
++);
4059 gen_shifti(s
, op
, ot
, opreg
, shift
);
4074 case 0x1a4: /* shld imm */
4078 case 0x1a5: /* shld cl */
4082 case 0x1ac: /* shrd imm */
4086 case 0x1ad: /* shrd cl */
4090 ot
= dflag
+ OT_WORD
;
4091 modrm
= ldub_code(s
->pc
++);
4092 mod
= (modrm
>> 6) & 3;
4093 rm
= (modrm
& 7) | REX_B(s
);
4094 reg
= ((modrm
>> 3) & 7) | rex_r
;
4097 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4098 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4100 gen_op_mov_TN_reg
[ot
][0][rm
]();
4102 gen_op_mov_TN_reg
[ot
][1][reg
]();
4105 val
= ldub_code(s
->pc
++);
4112 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4114 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4115 if (op
== 0 && ot
!= OT_WORD
)
4116 s
->cc_op
= CC_OP_SHLB
+ ot
;
4118 s
->cc_op
= CC_OP_SARB
+ ot
;
4121 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4122 gen_op_set_cc_op(s
->cc_op
);
4124 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4126 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4127 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4130 gen_op_mov_reg_T0
[ot
][rm
]();
4134 /************************/
4137 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4138 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4139 /* XXX: what to do if illegal op ? */
4140 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4143 modrm
= ldub_code(s
->pc
++);
4144 mod
= (modrm
>> 6) & 3;
4146 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4149 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4151 case 0x00 ... 0x07: /* fxxxs */
4152 case 0x10 ... 0x17: /* fixxxl */
4153 case 0x20 ... 0x27: /* fxxxl */
4154 case 0x30 ... 0x37: /* fixxx */
4161 gen_op_flds_FT0_A0();
4164 gen_op_fildl_FT0_A0();
4167 gen_op_fldl_FT0_A0();
4171 gen_op_fild_FT0_A0();
4175 gen_op_fp_arith_ST0_FT0
[op1
]();
4177 /* fcomp needs pop */
4182 case 0x08: /* flds */
4183 case 0x0a: /* fsts */
4184 case 0x0b: /* fstps */
4185 case 0x18: /* fildl */
4186 case 0x1a: /* fistl */
4187 case 0x1b: /* fistpl */
4188 case 0x28: /* fldl */
4189 case 0x2a: /* fstl */
4190 case 0x2b: /* fstpl */
4191 case 0x38: /* filds */
4192 case 0x3a: /* fists */
4193 case 0x3b: /* fistps */
4199 gen_op_flds_ST0_A0();
4202 gen_op_fildl_ST0_A0();
4205 gen_op_fldl_ST0_A0();
4209 gen_op_fild_ST0_A0();
4216 gen_op_fsts_ST0_A0();
4219 gen_op_fistl_ST0_A0();
4222 gen_op_fstl_ST0_A0();
4226 gen_op_fist_ST0_A0();
4234 case 0x0c: /* fldenv mem */
4235 gen_op_fldenv_A0(s
->dflag
);
4237 case 0x0d: /* fldcw mem */
4240 case 0x0e: /* fnstenv mem */
4241 gen_op_fnstenv_A0(s
->dflag
);
4243 case 0x0f: /* fnstcw mem */
4246 case 0x1d: /* fldt mem */
4247 gen_op_fldt_ST0_A0();
4249 case 0x1f: /* fstpt mem */
4250 gen_op_fstt_ST0_A0();
4253 case 0x2c: /* frstor mem */
4254 gen_op_frstor_A0(s
->dflag
);
4256 case 0x2e: /* fnsave mem */
4257 gen_op_fnsave_A0(s
->dflag
);
4259 case 0x2f: /* fnstsw mem */
4262 case 0x3c: /* fbld */
4263 gen_op_fbld_ST0_A0();
4265 case 0x3e: /* fbstp */
4266 gen_op_fbst_ST0_A0();
4269 case 0x3d: /* fildll */
4270 gen_op_fildll_ST0_A0();
4272 case 0x3f: /* fistpll */
4273 gen_op_fistll_ST0_A0();
4280 /* register float ops */
4284 case 0x08: /* fld sti */
4286 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4288 case 0x09: /* fxchg sti */
4289 case 0x29: /* fxchg4 sti, undocumented op */
4290 case 0x39: /* fxchg7 sti, undocumented op */
4291 gen_op_fxchg_ST0_STN(opreg
);
4293 case 0x0a: /* grp d9/2 */
4296 /* check exceptions (FreeBSD FPU probe) */
4297 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4298 gen_op_set_cc_op(s
->cc_op
);
4299 gen_jmp_im(pc_start
- s
->cs_base
);
4306 case 0x0c: /* grp d9/4 */
4316 gen_op_fcom_ST0_FT0();
4325 case 0x0d: /* grp d9/5 */
4334 gen_op_fldl2t_ST0();
4338 gen_op_fldl2e_ST0();
4346 gen_op_fldlg2_ST0();
4350 gen_op_fldln2_ST0();
4361 case 0x0e: /* grp d9/6 */
4372 case 3: /* fpatan */
4375 case 4: /* fxtract */
4378 case 5: /* fprem1 */
4381 case 6: /* fdecstp */
4385 case 7: /* fincstp */
4390 case 0x0f: /* grp d9/7 */
4395 case 1: /* fyl2xp1 */
4401 case 3: /* fsincos */
4404 case 5: /* fscale */
4407 case 4: /* frndint */
4419 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4420 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4421 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4427 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4431 gen_op_fmov_FT0_STN(opreg
);
4432 gen_op_fp_arith_ST0_FT0
[op1
]();
4436 case 0x02: /* fcom */
4437 case 0x22: /* fcom2, undocumented op */
4438 gen_op_fmov_FT0_STN(opreg
);
4439 gen_op_fcom_ST0_FT0();
4441 case 0x03: /* fcomp */
4442 case 0x23: /* fcomp3, undocumented op */
4443 case 0x32: /* fcomp5, undocumented op */
4444 gen_op_fmov_FT0_STN(opreg
);
4445 gen_op_fcom_ST0_FT0();
4448 case 0x15: /* da/5 */
4450 case 1: /* fucompp */
4451 gen_op_fmov_FT0_STN(1);
4452 gen_op_fucom_ST0_FT0();
4462 case 0: /* feni (287 only, just do nop here) */
4464 case 1: /* fdisi (287 only, just do nop here) */
4469 case 3: /* fninit */
4472 case 4: /* fsetpm (287 only, just do nop here) */
4478 case 0x1d: /* fucomi */
4479 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4480 gen_op_set_cc_op(s
->cc_op
);
4481 gen_op_fmov_FT0_STN(opreg
);
4482 gen_op_fucomi_ST0_FT0();
4483 s
->cc_op
= CC_OP_EFLAGS
;
4485 case 0x1e: /* fcomi */
4486 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4487 gen_op_set_cc_op(s
->cc_op
);
4488 gen_op_fmov_FT0_STN(opreg
);
4489 gen_op_fcomi_ST0_FT0();
4490 s
->cc_op
= CC_OP_EFLAGS
;
4492 case 0x28: /* ffree sti */
4493 gen_op_ffree_STN(opreg
);
4495 case 0x2a: /* fst sti */
4496 gen_op_fmov_STN_ST0(opreg
);
4498 case 0x2b: /* fstp sti */
4499 case 0x0b: /* fstp1 sti, undocumented op */
4500 case 0x3a: /* fstp8 sti, undocumented op */
4501 case 0x3b: /* fstp9 sti, undocumented op */
4502 gen_op_fmov_STN_ST0(opreg
);
4505 case 0x2c: /* fucom st(i) */
4506 gen_op_fmov_FT0_STN(opreg
);
4507 gen_op_fucom_ST0_FT0();
4509 case 0x2d: /* fucomp st(i) */
4510 gen_op_fmov_FT0_STN(opreg
);
4511 gen_op_fucom_ST0_FT0();
4514 case 0x33: /* de/3 */
4516 case 1: /* fcompp */
4517 gen_op_fmov_FT0_STN(1);
4518 gen_op_fcom_ST0_FT0();
4526 case 0x38: /* ffreep sti, undocumented op */
4527 gen_op_ffree_STN(opreg
);
4530 case 0x3c: /* df/4 */
4533 gen_op_fnstsw_EAX();
4539 case 0x3d: /* fucomip */
4540 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4541 gen_op_set_cc_op(s
->cc_op
);
4542 gen_op_fmov_FT0_STN(opreg
);
4543 gen_op_fucomi_ST0_FT0();
4545 s
->cc_op
= CC_OP_EFLAGS
;
4547 case 0x3e: /* fcomip */
4548 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4549 gen_op_set_cc_op(s
->cc_op
);
4550 gen_op_fmov_FT0_STN(opreg
);
4551 gen_op_fcomi_ST0_FT0();
4553 s
->cc_op
= CC_OP_EFLAGS
;
4555 case 0x10 ... 0x13: /* fcmovxx */
4559 const static uint8_t fcmov_cc
[8] = {
4565 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4567 gen_op_fcmov_ST0_STN_T0(opreg
);
4574 #ifdef USE_CODE_COPY
4575 s
->tb
->cflags
|= CF_TB_FP_USED
;
4578 /************************/
4581 case 0xa4: /* movsS */
4586 ot
= dflag
+ OT_WORD
;
4588 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4589 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4595 case 0xaa: /* stosS */
4600 ot
= dflag
+ OT_WORD
;
4602 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4603 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4608 case 0xac: /* lodsS */
4613 ot
= dflag
+ OT_WORD
;
4614 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4615 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4620 case 0xae: /* scasS */
4625 ot
= dflag
+ OT_WORD
;
4626 if (prefixes
& PREFIX_REPNZ
) {
4627 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4628 } else if (prefixes
& PREFIX_REPZ
) {
4629 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4632 s
->cc_op
= CC_OP_SUBB
+ ot
;
4636 case 0xa6: /* cmpsS */
4641 ot
= dflag
+ OT_WORD
;
4642 if (prefixes
& PREFIX_REPNZ
) {
4643 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4644 } else if (prefixes
& PREFIX_REPZ
) {
4645 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4648 s
->cc_op
= CC_OP_SUBB
+ ot
;
4651 case 0x6c: /* insS */
4656 ot
= dflag
? OT_LONG
: OT_WORD
;
4657 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4658 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4659 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4664 case 0x6e: /* outsS */
4669 ot
= dflag
? OT_LONG
: OT_WORD
;
4670 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4671 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4672 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4678 /************************/
4685 ot
= dflag
? OT_LONG
: OT_WORD
;
4686 val
= ldub_code(s
->pc
++);
4687 gen_op_movl_T0_im(val
);
4688 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4690 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4697 ot
= dflag
? OT_LONG
: OT_WORD
;
4698 val
= ldub_code(s
->pc
++);
4699 gen_op_movl_T0_im(val
);
4700 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4701 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4709 ot
= dflag
? OT_LONG
: OT_WORD
;
4710 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4711 gen_op_andl_T0_ffff();
4712 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4714 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4721 ot
= dflag
? OT_LONG
: OT_WORD
;
4722 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4723 gen_op_andl_T0_ffff();
4724 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4725 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4729 /************************/
4731 case 0xc2: /* ret im */
4732 val
= ldsw_code(s
->pc
);
4735 gen_stack_update(s
, val
+ (2 << s
->dflag
));
4737 gen_op_andl_T0_ffff();
4741 case 0xc3: /* ret */
4745 gen_op_andl_T0_ffff();
4749 case 0xca: /* lret im */
4750 val
= ldsw_code(s
->pc
);
4753 if (s
->pe
&& !s
->vm86
) {
4754 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4755 gen_op_set_cc_op(s
->cc_op
);
4756 gen_jmp_im(pc_start
- s
->cs_base
);
4757 gen_op_lret_protected(s
->dflag
, val
);
4761 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4763 gen_op_andl_T0_ffff();
4764 /* NOTE: keeping EIP updated is not a problem in case of
4768 gen_op_addl_A0_im(2 << s
->dflag
);
4769 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4770 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
4771 /* add stack offset */
4772 gen_stack_update(s
, val
+ (4 << s
->dflag
));
4776 case 0xcb: /* lret */
4779 case 0xcf: /* iret */
4782 gen_op_iret_real(s
->dflag
);
4783 s
->cc_op
= CC_OP_EFLAGS
;
4784 } else if (s
->vm86
) {
4786 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4788 gen_op_iret_real(s
->dflag
);
4789 s
->cc_op
= CC_OP_EFLAGS
;
4792 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4793 gen_op_set_cc_op(s
->cc_op
);
4794 gen_jmp_im(pc_start
- s
->cs_base
);
4795 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
4796 s
->cc_op
= CC_OP_EFLAGS
;
4800 case 0xe8: /* call im */
4803 tval
= (int32_t)insn_get(s
, OT_LONG
);
4805 tval
= (int16_t)insn_get(s
, OT_WORD
);
4806 next_eip
= s
->pc
- s
->cs_base
;
4810 gen_movtl_T0_im(next_eip
);
4815 case 0x9a: /* lcall im */
4817 unsigned int selector
, offset
;
4821 ot
= dflag
? OT_LONG
: OT_WORD
;
4822 offset
= insn_get(s
, ot
);
4823 selector
= insn_get(s
, OT_WORD
);
4825 gen_op_movl_T0_im(selector
);
4826 gen_op_movl_T1_imu(offset
);
4829 case 0xe9: /* jmp */
4831 tval
= (int32_t)insn_get(s
, OT_LONG
);
4833 tval
= (int16_t)insn_get(s
, OT_WORD
);
4834 tval
+= s
->pc
- s
->cs_base
;
4839 case 0xea: /* ljmp im */
4841 unsigned int selector
, offset
;
4845 ot
= dflag
? OT_LONG
: OT_WORD
;
4846 offset
= insn_get(s
, ot
);
4847 selector
= insn_get(s
, OT_WORD
);
4849 gen_op_movl_T0_im(selector
);
4850 gen_op_movl_T1_imu(offset
);
4853 case 0xeb: /* jmp Jb */
4854 tval
= (int8_t)insn_get(s
, OT_BYTE
);
4855 tval
+= s
->pc
- s
->cs_base
;
4860 case 0x70 ... 0x7f: /* jcc Jb */
4861 tval
= (int8_t)insn_get(s
, OT_BYTE
);
4863 case 0x180 ... 0x18f: /* jcc Jv */
4865 tval
= (int32_t)insn_get(s
, OT_LONG
);
4867 tval
= (int16_t)insn_get(s
, OT_WORD
);
4870 next_eip
= s
->pc
- s
->cs_base
;
4874 gen_jcc(s
, b
, tval
, next_eip
);
4877 case 0x190 ... 0x19f: /* setcc Gv */
4878 modrm
= ldub_code(s
->pc
++);
4880 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
4882 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4883 ot
= dflag
+ OT_WORD
;
4884 modrm
= ldub_code(s
->pc
++);
4885 reg
= ((modrm
>> 3) & 7) | rex_r
;
4886 mod
= (modrm
>> 6) & 3;
4889 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4890 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4892 rm
= (modrm
& 7) | REX_B(s
);
4893 gen_op_mov_TN_reg
[ot
][1][rm
]();
4895 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
4898 /************************/
4900 case 0x9c: /* pushf */
4901 if (s
->vm86
&& s
->iopl
!= 3) {
4902 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4904 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4905 gen_op_set_cc_op(s
->cc_op
);
4906 gen_op_movl_T0_eflags();
4910 case 0x9d: /* popf */
4911 if (s
->vm86
&& s
->iopl
!= 3) {
4912 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4917 gen_op_movl_eflags_T0_cpl0();
4919 gen_op_movw_eflags_T0_cpl0();
4922 if (s
->cpl
<= s
->iopl
) {
4924 gen_op_movl_eflags_T0_io();
4926 gen_op_movw_eflags_T0_io();
4930 gen_op_movl_eflags_T0();
4932 gen_op_movw_eflags_T0();
4937 s
->cc_op
= CC_OP_EFLAGS
;
4938 /* abort translation because TF flag may change */
4939 gen_jmp_im(s
->pc
- s
->cs_base
);
4943 case 0x9e: /* sahf */
4946 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
4947 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4948 gen_op_set_cc_op(s
->cc_op
);
4949 gen_op_movb_eflags_T0();
4950 s
->cc_op
= CC_OP_EFLAGS
;
4952 case 0x9f: /* lahf */
4955 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4956 gen_op_set_cc_op(s
->cc_op
);
4957 gen_op_movl_T0_eflags();
4958 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
4960 case 0xf5: /* cmc */
4961 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4962 gen_op_set_cc_op(s
->cc_op
);
4964 s
->cc_op
= CC_OP_EFLAGS
;
4966 case 0xf8: /* clc */
4967 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4968 gen_op_set_cc_op(s
->cc_op
);
4970 s
->cc_op
= CC_OP_EFLAGS
;
4972 case 0xf9: /* stc */
4973 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4974 gen_op_set_cc_op(s
->cc_op
);
4976 s
->cc_op
= CC_OP_EFLAGS
;
4978 case 0xfc: /* cld */
4981 case 0xfd: /* std */
4985 /************************/
4986 /* bit operations */
4987 case 0x1ba: /* bt/bts/btr/btc Gv, im */
4988 ot
= dflag
+ OT_WORD
;
4989 modrm
= ldub_code(s
->pc
++);
4990 op
= ((modrm
>> 3) & 7) | rex_r
;
4991 mod
= (modrm
>> 6) & 3;
4992 rm
= (modrm
& 7) | REX_B(s
);
4995 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4996 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4998 gen_op_mov_TN_reg
[ot
][0][rm
]();
5001 val
= ldub_code(s
->pc
++);
5002 gen_op_movl_T1_im(val
);
5006 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5007 s
->cc_op
= CC_OP_SARB
+ ot
;
5010 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5012 gen_op_mov_reg_T0
[ot
][rm
]();
5013 gen_op_update_bt_cc();
5016 case 0x1a3: /* bt Gv, Ev */
5019 case 0x1ab: /* bts */
5022 case 0x1b3: /* btr */
5025 case 0x1bb: /* btc */
5028 ot
= dflag
+ OT_WORD
;
5029 modrm
= ldub_code(s
->pc
++);
5030 reg
= ((modrm
>> 3) & 7) | rex_r
;
5031 mod
= (modrm
>> 6) & 3;
5032 rm
= (modrm
& 7) | REX_B(s
);
5033 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
5035 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5036 /* specific case: we need to add a displacement */
5037 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5038 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5040 gen_op_mov_TN_reg
[ot
][0][rm
]();
5042 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5043 s
->cc_op
= CC_OP_SARB
+ ot
;
5046 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5048 gen_op_mov_reg_T0
[ot
][rm
]();
5049 gen_op_update_bt_cc();
5052 case 0x1bc: /* bsf */
5053 case 0x1bd: /* bsr */
5054 ot
= dflag
+ OT_WORD
;
5055 modrm
= ldub_code(s
->pc
++);
5056 reg
= ((modrm
>> 3) & 7) | rex_r
;
5057 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5058 /* NOTE: in order to handle the 0 case, we must load the
5059 result. It could be optimized with a generated jump */
5060 gen_op_mov_TN_reg
[ot
][1][reg
]();
5061 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5062 gen_op_mov_reg_T1
[ot
][reg
]();
5063 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5065 /************************/
5067 case 0x27: /* daa */
5070 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5071 gen_op_set_cc_op(s
->cc_op
);
5073 s
->cc_op
= CC_OP_EFLAGS
;
5075 case 0x2f: /* das */
5078 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5079 gen_op_set_cc_op(s
->cc_op
);
5081 s
->cc_op
= CC_OP_EFLAGS
;
5083 case 0x37: /* aaa */
5086 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5087 gen_op_set_cc_op(s
->cc_op
);
5089 s
->cc_op
= CC_OP_EFLAGS
;
5091 case 0x3f: /* aas */
5094 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5095 gen_op_set_cc_op(s
->cc_op
);
5097 s
->cc_op
= CC_OP_EFLAGS
;
5099 case 0xd4: /* aam */
5102 val
= ldub_code(s
->pc
++);
5104 s
->cc_op
= CC_OP_LOGICB
;
5106 case 0xd5: /* aad */
5109 val
= ldub_code(s
->pc
++);
5111 s
->cc_op
= CC_OP_LOGICB
;
5113 /************************/
5115 case 0x90: /* nop */
5116 /* XXX: xchg + rex handling */
5117 /* XXX: correct lock test for all insn */
5118 if (prefixes
& PREFIX_LOCK
)
5121 case 0x9b: /* fwait */
5122 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5123 (HF_MP_MASK
| HF_TS_MASK
)) {
5124 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5126 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5127 gen_op_set_cc_op(s
->cc_op
);
5128 gen_jmp_im(pc_start
- s
->cs_base
);
5132 case 0xcc: /* int3 */
5133 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5135 case 0xcd: /* int N */
5136 val
= ldub_code(s
->pc
++);
5137 if (s
->vm86
&& s
->iopl
!= 3) {
5138 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5140 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5143 case 0xce: /* into */
5146 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5147 gen_op_set_cc_op(s
->cc_op
);
5148 gen_jmp_im(pc_start
- s
->cs_base
);
5149 gen_op_into(s
->pc
- pc_start
);
5151 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5153 gen_debug(s
, pc_start
- s
->cs_base
);
5156 cpu_set_log(CPU_LOG_TB_IN_ASM
| CPU_LOG_PCALL
);
5159 case 0xfa: /* cli */
5161 if (s
->cpl
<= s
->iopl
) {
5164 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5170 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5174 case 0xfb: /* sti */
5176 if (s
->cpl
<= s
->iopl
) {
5179 /* interruptions are enabled only the first insn after sti */
5180 /* If several instructions disable interrupts, only the
5182 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5183 gen_op_set_inhibit_irq();
5184 /* give a chance to handle pending irqs */
5185 gen_jmp_im(s
->pc
- s
->cs_base
);
5188 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5194 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5198 case 0x62: /* bound */
5201 ot
= dflag
? OT_LONG
: OT_WORD
;
5202 modrm
= ldub_code(s
->pc
++);
5203 reg
= (modrm
>> 3) & 7;
5204 mod
= (modrm
>> 6) & 3;
5207 gen_op_mov_TN_reg
[ot
][0][reg
]();
5208 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5209 gen_jmp_im(pc_start
- s
->cs_base
);
5215 case 0x1c8 ... 0x1cf: /* bswap reg */
5216 reg
= (b
& 7) | REX_B(s
);
5217 #ifdef TARGET_X86_64
5219 gen_op_mov_TN_reg
[OT_QUAD
][0][reg
]();
5221 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
5225 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
5227 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
5230 case 0xd6: /* salc */
5233 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5234 gen_op_set_cc_op(s
->cc_op
);
5237 case 0xe0: /* loopnz */
5238 case 0xe1: /* loopz */
5239 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5240 gen_op_set_cc_op(s
->cc_op
);
5242 case 0xe2: /* loop */
5243 case 0xe3: /* jecxz */
5247 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5248 next_eip
= s
->pc
- s
->cs_base
;
5253 l1
= gen_new_label();
5254 l2
= gen_new_label();
5257 gen_op_jz_ecx
[s
->aflag
](l1
);
5259 gen_op_dec_ECX
[s
->aflag
]();
5260 gen_op_loop
[s
->aflag
][b
](l1
);
5263 gen_jmp_im(next_eip
);
5264 gen_op_jmp_label(l2
);
5271 case 0x130: /* wrmsr */
5272 case 0x132: /* rdmsr */
5274 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5282 case 0x131: /* rdtsc */
5285 case 0x134: /* sysenter */
5289 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5291 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5292 gen_op_set_cc_op(s
->cc_op
);
5293 s
->cc_op
= CC_OP_DYNAMIC
;
5295 gen_jmp_im(pc_start
- s
->cs_base
);
5300 case 0x135: /* sysexit */
5304 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5306 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5307 gen_op_set_cc_op(s
->cc_op
);
5308 s
->cc_op
= CC_OP_DYNAMIC
;
5310 gen_jmp_im(pc_start
- s
->cs_base
);
5315 #ifdef TARGET_X86_64
5316 case 0x105: /* syscall */
5317 /* XXX: is it usable in real mode ? */
5318 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5319 gen_op_set_cc_op(s
->cc_op
);
5320 s
->cc_op
= CC_OP_DYNAMIC
;
5322 gen_jmp_im(pc_start
- s
->cs_base
);
5323 gen_op_syscall(s
->pc
- pc_start
);
5326 case 0x107: /* sysret */
5328 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5330 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5331 gen_op_set_cc_op(s
->cc_op
);
5332 s
->cc_op
= CC_OP_DYNAMIC
;
5334 gen_jmp_im(pc_start
- s
->cs_base
);
5335 gen_op_sysret(s
->dflag
);
5340 case 0x1a2: /* cpuid */
5343 case 0xf4: /* hlt */
5345 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5347 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5348 gen_op_set_cc_op(s
->cc_op
);
5349 gen_jmp_im(s
->pc
- s
->cs_base
);
5355 modrm
= ldub_code(s
->pc
++);
5356 mod
= (modrm
>> 6) & 3;
5357 op
= (modrm
>> 3) & 7;
5360 if (!s
->pe
|| s
->vm86
)
5362 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5366 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5369 if (!s
->pe
|| s
->vm86
)
5372 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5374 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5375 gen_jmp_im(pc_start
- s
->cs_base
);
5380 if (!s
->pe
|| s
->vm86
)
5382 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5386 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5389 if (!s
->pe
|| s
->vm86
)
5392 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5394 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5395 gen_jmp_im(pc_start
- s
->cs_base
);
5401 if (!s
->pe
|| s
->vm86
)
5403 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5404 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5405 gen_op_set_cc_op(s
->cc_op
);
5410 s
->cc_op
= CC_OP_EFLAGS
;
5417 modrm
= ldub_code(s
->pc
++);
5418 mod
= (modrm
>> 6) & 3;
5419 op
= (modrm
>> 3) & 7;
5425 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5427 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.limit
));
5429 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.limit
));
5430 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
5431 #ifdef TARGET_X86_64
5433 gen_op_addq_A0_im(2);
5436 gen_op_addl_A0_im(2);
5438 gen_op_movtl_T0_env(offsetof(CPUX86State
,gdt
.base
));
5440 gen_op_movtl_T0_env(offsetof(CPUX86State
,idt
.base
));
5442 gen_op_andl_T0_im(0xffffff);
5443 gen_op_st_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5450 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5452 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5453 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
5454 #ifdef TARGET_X86_64
5456 gen_op_addq_A0_im(2);
5459 gen_op_addl_A0_im(2);
5460 gen_op_ld_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5462 gen_op_andl_T0_im(0xffffff);
5464 gen_op_movtl_env_T0(offsetof(CPUX86State
,gdt
.base
));
5465 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
5467 gen_op_movtl_env_T0(offsetof(CPUX86State
,idt
.base
));
5468 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
5473 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
5474 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
5478 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5480 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5482 gen_jmp_im(s
->pc
- s
->cs_base
);
5486 case 7: /* invlpg */
5488 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5491 #ifdef TARGET_X86_64
5492 if (CODE64(s
) && (modrm
& 7) == 0) {
5494 gen_op_movtl_T0_env(offsetof(CPUX86State
,segs
[R_GS
].base
));
5495 gen_op_movtl_T1_env(offsetof(CPUX86State
,kernelgsbase
));
5496 gen_op_movtl_env_T1(offsetof(CPUX86State
,segs
[R_GS
].base
));
5497 gen_op_movtl_env_T0(offsetof(CPUX86State
,kernelgsbase
));
5504 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5506 gen_jmp_im(s
->pc
- s
->cs_base
);
5515 case 0x108: /* invd */
5516 case 0x109: /* wbinvd */
5518 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5523 case 0x63: /* arpl or movslS (x86_64) */
5524 #ifdef TARGET_X86_64
5527 /* d_ot is the size of destination */
5528 d_ot
= dflag
+ OT_WORD
;
5530 modrm
= ldub_code(s
->pc
++);
5531 reg
= ((modrm
>> 3) & 7) | rex_r
;
5532 mod
= (modrm
>> 6) & 3;
5533 rm
= (modrm
& 7) | REX_B(s
);
5536 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
5538 if (d_ot
== OT_QUAD
)
5539 gen_op_movslq_T0_T0();
5540 gen_op_mov_reg_T0
[d_ot
][reg
]();
5542 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5543 if (d_ot
== OT_QUAD
) {
5544 gen_op_lds_T0_A0
[OT_LONG
+ s
->mem_index
]();
5546 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5548 gen_op_mov_reg_T0
[d_ot
][reg
]();
5553 if (!s
->pe
|| s
->vm86
)
5555 ot
= dflag
? OT_LONG
: OT_WORD
;
5556 modrm
= ldub_code(s
->pc
++);
5557 reg
= (modrm
>> 3) & 7;
5558 mod
= (modrm
>> 6) & 3;
5561 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5562 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5564 gen_op_mov_TN_reg
[ot
][0][rm
]();
5566 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5567 gen_op_set_cc_op(s
->cc_op
);
5569 s
->cc_op
= CC_OP_EFLAGS
;
5571 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5573 gen_op_mov_reg_T0
[ot
][rm
]();
5575 gen_op_arpl_update();
5578 case 0x102: /* lar */
5579 case 0x103: /* lsl */
5580 if (!s
->pe
|| s
->vm86
)
5582 ot
= dflag
? OT_LONG
: OT_WORD
;
5583 modrm
= ldub_code(s
->pc
++);
5584 reg
= ((modrm
>> 3) & 7) | rex_r
;
5585 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5586 gen_op_mov_TN_reg
[ot
][1][reg
]();
5587 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5588 gen_op_set_cc_op(s
->cc_op
);
5593 s
->cc_op
= CC_OP_EFLAGS
;
5594 gen_op_mov_reg_T1
[ot
][reg
]();
5597 modrm
= ldub_code(s
->pc
++);
5598 mod
= (modrm
>> 6) & 3;
5599 op
= (modrm
>> 3) & 7;
5601 case 0: /* prefetchnta */
5602 case 1: /* prefetchnt0 */
5603 case 2: /* prefetchnt0 */
5604 case 3: /* prefetchnt0 */
5607 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5608 /* nothing more to do */
5614 case 0x120: /* mov reg, crN */
5615 case 0x122: /* mov crN, reg */
5617 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5619 modrm
= ldub_code(s
->pc
++);
5620 if ((modrm
& 0xc0) != 0xc0)
5622 rm
= (modrm
& 7) | REX_B(s
);
5623 reg
= ((modrm
>> 3) & 7) | rex_r
;
5634 gen_op_mov_TN_reg
[ot
][0][rm
]();
5635 gen_op_movl_crN_T0(reg
);
5636 gen_jmp_im(s
->pc
- s
->cs_base
);
5639 gen_op_movtl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
5640 gen_op_mov_reg_T0
[ot
][rm
]();
5643 /* XXX: add CR8 for x86_64 */
5649 case 0x121: /* mov reg, drN */
5650 case 0x123: /* mov drN, reg */
5652 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5654 modrm
= ldub_code(s
->pc
++);
5655 if ((modrm
& 0xc0) != 0xc0)
5657 rm
= (modrm
& 7) | REX_B(s
);
5658 reg
= ((modrm
>> 3) & 7) | rex_r
;
5663 /* XXX: do it dynamically with CR4.DE bit */
5664 if (reg
== 4 || reg
== 5 || reg
>= 8)
5667 gen_op_mov_TN_reg
[ot
][0][rm
]();
5668 gen_op_movl_drN_T0(reg
);
5669 gen_jmp_im(s
->pc
- s
->cs_base
);
5672 gen_op_movtl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
5673 gen_op_mov_reg_T0
[ot
][rm
]();
5677 case 0x106: /* clts */
5679 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5682 /* abort block because static cpu state changed */
5683 gen_jmp_im(s
->pc
- s
->cs_base
);
5687 /* MMX/SSE/SSE2/PNI support */
5688 case 0x1c3: /* MOVNTI reg, mem */
5689 if (!(s
->cpuid_features
& CPUID_SSE2
))
5691 ot
= s
->dflag
== 2 ? OT_QUAD
: OT_LONG
;
5692 modrm
= ldub_code(s
->pc
++);
5693 mod
= (modrm
>> 6) & 3;
5696 reg
= ((modrm
>> 3) & 7) | rex_r
;
5697 /* generate a generic store */
5698 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
5701 modrm
= ldub_code(s
->pc
++);
5702 mod
= (modrm
>> 6) & 3;
5703 op
= (modrm
>> 3) & 7;
5705 case 0: /* fxsave */
5706 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
))
5708 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5709 gen_op_fxsave_A0((s
->dflag
== 2));
5711 case 1: /* fxrstor */
5712 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
))
5714 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5715 gen_op_fxrstor_A0((s
->dflag
== 2));
5717 case 2: /* ldmxcsr */
5718 case 3: /* stmxcsr */
5719 if (s
->flags
& HF_TS_MASK
) {
5720 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5723 if ((s
->flags
& HF_EM_MASK
) || !(s
->flags
& HF_OSFXSR_MASK
) ||
5726 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5728 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5729 gen_op_movl_env_T0(offsetof(CPUX86State
, mxcsr
));
5731 gen_op_movl_T0_env(offsetof(CPUX86State
, mxcsr
));
5732 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
5735 case 5: /* lfence */
5736 case 6: /* mfence */
5737 case 7: /* sfence */
5738 if ((modrm
& 0xc7) != 0xc0 || !(s
->cpuid_features
& CPUID_SSE
))
5745 case 0x110 ... 0x117:
5746 case 0x128 ... 0x12f:
5747 case 0x150 ... 0x177:
5748 case 0x17c ... 0x17f:
5750 case 0x1c4 ... 0x1c6:
5751 case 0x1d0 ... 0x1fe:
5752 gen_sse(s
, b
, pc_start
, rex_r
);
5757 /* lock generation */
5758 if (s
->prefix
& PREFIX_LOCK
)
5762 if (s
->prefix
& PREFIX_LOCK
)
5764 /* XXX: ensure that no lock was generated */
5765 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
5769 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
5770 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
5772 /* flags read by an operation */
5773 static uint16_t opc_read_flags
[NB_OPS
] = {
5774 [INDEX_op_aas
] = CC_A
,
5775 [INDEX_op_aaa
] = CC_A
,
5776 [INDEX_op_das
] = CC_A
| CC_C
,
5777 [INDEX_op_daa
] = CC_A
| CC_C
,
5779 /* subtle: due to the incl/decl implementation, C is used */
5780 [INDEX_op_update_inc_cc
] = CC_C
,
5782 [INDEX_op_into
] = CC_O
,
5784 [INDEX_op_jb_subb
] = CC_C
,
5785 [INDEX_op_jb_subw
] = CC_C
,
5786 [INDEX_op_jb_subl
] = CC_C
,
5788 [INDEX_op_jz_subb
] = CC_Z
,
5789 [INDEX_op_jz_subw
] = CC_Z
,
5790 [INDEX_op_jz_subl
] = CC_Z
,
5792 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
5793 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
5794 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
5796 [INDEX_op_js_subb
] = CC_S
,
5797 [INDEX_op_js_subw
] = CC_S
,
5798 [INDEX_op_js_subl
] = CC_S
,
5800 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
5801 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
5802 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
5804 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
5805 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
5806 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
5808 [INDEX_op_loopnzw
] = CC_Z
,
5809 [INDEX_op_loopnzl
] = CC_Z
,
5810 [INDEX_op_loopzw
] = CC_Z
,
5811 [INDEX_op_loopzl
] = CC_Z
,
5813 [INDEX_op_seto_T0_cc
] = CC_O
,
5814 [INDEX_op_setb_T0_cc
] = CC_C
,
5815 [INDEX_op_setz_T0_cc
] = CC_Z
,
5816 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
5817 [INDEX_op_sets_T0_cc
] = CC_S
,
5818 [INDEX_op_setp_T0_cc
] = CC_P
,
5819 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
5820 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
5822 [INDEX_op_setb_T0_subb
] = CC_C
,
5823 [INDEX_op_setb_T0_subw
] = CC_C
,
5824 [INDEX_op_setb_T0_subl
] = CC_C
,
5826 [INDEX_op_setz_T0_subb
] = CC_Z
,
5827 [INDEX_op_setz_T0_subw
] = CC_Z
,
5828 [INDEX_op_setz_T0_subl
] = CC_Z
,
5830 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
5831 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
5832 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
5834 [INDEX_op_sets_T0_subb
] = CC_S
,
5835 [INDEX_op_sets_T0_subw
] = CC_S
,
5836 [INDEX_op_sets_T0_subl
] = CC_S
,
5838 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
5839 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
5840 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
5842 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
5843 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
5844 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
5846 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
5847 [INDEX_op_cmc
] = CC_C
,
5848 [INDEX_op_salc
] = CC_C
,
5850 /* needed for correct flag optimisation before string ops */
5851 [INDEX_op_jnz_ecxw
] = CC_OSZAPC
,
5852 [INDEX_op_jnz_ecxl
] = CC_OSZAPC
,
5853 [INDEX_op_jz_ecxw
] = CC_OSZAPC
,
5854 [INDEX_op_jz_ecxl
] = CC_OSZAPC
,
5856 #ifdef TARGET_X86_64
5857 [INDEX_op_jb_subq
] = CC_C
,
5858 [INDEX_op_jz_subq
] = CC_Z
,
5859 [INDEX_op_jbe_subq
] = CC_Z
| CC_C
,
5860 [INDEX_op_js_subq
] = CC_S
,
5861 [INDEX_op_jl_subq
] = CC_O
| CC_S
,
5862 [INDEX_op_jle_subq
] = CC_O
| CC_S
| CC_Z
,
5864 [INDEX_op_loopnzq
] = CC_Z
,
5865 [INDEX_op_loopzq
] = CC_Z
,
5867 [INDEX_op_setb_T0_subq
] = CC_C
,
5868 [INDEX_op_setz_T0_subq
] = CC_Z
,
5869 [INDEX_op_setbe_T0_subq
] = CC_Z
| CC_C
,
5870 [INDEX_op_sets_T0_subq
] = CC_S
,
5871 [INDEX_op_setl_T0_subq
] = CC_O
| CC_S
,
5872 [INDEX_op_setle_T0_subq
] = CC_O
| CC_S
| CC_Z
,
5874 [INDEX_op_jnz_ecxq
] = CC_OSZAPC
,
5875 [INDEX_op_jz_ecxq
] = CC_OSZAPC
,
5878 #define DEF_READF(SUFFIX)\
5879 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5880 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5881 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5882 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5883 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5884 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5885 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5886 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5888 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5889 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5890 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
5891 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5892 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5893 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5894 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5895 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
5899 #ifndef CONFIG_USER_ONLY
5905 /* flags written by an operation */
5906 static uint16_t opc_write_flags
[NB_OPS
] = {
5907 [INDEX_op_update2_cc
] = CC_OSZAPC
,
5908 [INDEX_op_update1_cc
] = CC_OSZAPC
,
5909 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
5910 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
5911 /* subtle: due to the incl/decl implementation, C is used */
5912 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
5913 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
5915 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
5916 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
5917 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
5918 X86_64_DEF([INDEX_op_mulq_EAX_T0
] = CC_OSZAPC
,)
5919 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
5920 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
5921 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
5922 X86_64_DEF([INDEX_op_imulq_EAX_T0
] = CC_OSZAPC
,)
5923 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
5924 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
5925 X86_64_DEF([INDEX_op_imulq_T0_T1
] = CC_OSZAPC
,)
5928 [INDEX_op_ucomiss
] = CC_OSZAPC
,
5929 [INDEX_op_ucomisd
] = CC_OSZAPC
,
5930 [INDEX_op_comiss
] = CC_OSZAPC
,
5931 [INDEX_op_comisd
] = CC_OSZAPC
,
5934 [INDEX_op_aam
] = CC_OSZAPC
,
5935 [INDEX_op_aad
] = CC_OSZAPC
,
5936 [INDEX_op_aas
] = CC_OSZAPC
,
5937 [INDEX_op_aaa
] = CC_OSZAPC
,
5938 [INDEX_op_das
] = CC_OSZAPC
,
5939 [INDEX_op_daa
] = CC_OSZAPC
,
5941 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
5942 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
5943 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
5944 [INDEX_op_movw_eflags_T0_io
] = CC_OSZAPC
,
5945 [INDEX_op_movl_eflags_T0_io
] = CC_OSZAPC
,
5946 [INDEX_op_movw_eflags_T0_cpl0
] = CC_OSZAPC
,
5947 [INDEX_op_movl_eflags_T0_cpl0
] = CC_OSZAPC
,
5948 [INDEX_op_clc
] = CC_C
,
5949 [INDEX_op_stc
] = CC_C
,
5950 [INDEX_op_cmc
] = CC_C
,
5952 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
5953 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
5954 X86_64_DEF([INDEX_op_btq_T0_T1_cc
] = CC_OSZAPC
,)
5955 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
5956 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
5957 X86_64_DEF([INDEX_op_btsq_T0_T1_cc
] = CC_OSZAPC
,)
5958 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
5959 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
5960 X86_64_DEF([INDEX_op_btrq_T0_T1_cc
] = CC_OSZAPC
,)
5961 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
5962 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
5963 X86_64_DEF([INDEX_op_btcq_T0_T1_cc
] = CC_OSZAPC
,)
5965 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
5966 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
5967 X86_64_DEF([INDEX_op_bsfq_T0_cc
] = CC_OSZAPC
,)
5968 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
5969 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
5970 X86_64_DEF([INDEX_op_bsrq_T0_cc
] = CC_OSZAPC
,)
5972 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
5973 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
5974 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
5975 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc
] = CC_OSZAPC
,)
5977 [INDEX_op_cmpxchg8b
] = CC_Z
,
5978 [INDEX_op_lar
] = CC_Z
,
5979 [INDEX_op_lsl
] = CC_Z
,
5980 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
5981 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
5983 #define DEF_WRITEF(SUFFIX)\
5984 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5985 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5986 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5987 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5988 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5989 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5990 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
5991 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
5993 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5994 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5995 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5996 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
5997 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5998 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
5999 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6000 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6002 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6003 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6004 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6005 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6006 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6007 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6008 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6009 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6011 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6012 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6013 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6014 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6016 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6017 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6018 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6019 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6021 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6022 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6023 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6024 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6026 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6027 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6028 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6029 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6030 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6031 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6033 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6034 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6035 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6036 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6037 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6038 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6040 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6041 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6042 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6043 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6048 #ifndef CONFIG_USER_ONLY
6054 /* simpler form of an operation if no flags need to be generated */
6055 static uint16_t opc_simpler
[NB_OPS
] = {
6056 [INDEX_op_update2_cc
] = INDEX_op_nop
,
6057 [INDEX_op_update1_cc
] = INDEX_op_nop
,
6058 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
6060 /* broken: CC_OP logic must be rewritten */
6061 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
6064 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
6065 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
6066 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
6067 X86_64_DEF([INDEX_op_shlq_T0_T1_cc
] = INDEX_op_shlq_T0_T1
,)
6069 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
6070 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
6071 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
6072 X86_64_DEF([INDEX_op_shrq_T0_T1_cc
] = INDEX_op_shrq_T0_T1
,)
6074 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
6075 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
6076 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
6077 X86_64_DEF([INDEX_op_sarq_T0_T1_cc
] = INDEX_op_sarq_T0_T1
,)
6079 #define DEF_SIMPLER(SUFFIX)\
6080 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6081 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6082 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6083 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6085 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6086 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6087 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6088 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6092 #ifndef CONFIG_USER_ONLY
6093 DEF_SIMPLER(_kernel
)
6098 void optimize_flags_init(void)
6101 /* put default values in arrays */
6102 for(i
= 0; i
< NB_OPS
; i
++) {
6103 if (opc_simpler
[i
] == 0)
6108 /* CPU flags computation optimization: we move backward thru the
6109 generated code to see which flags are needed. The operation is
6110 modified if suitable */
6111 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
6114 int live_flags
, write_flags
, op
;
6116 opc_ptr
= opc_buf
+ opc_buf_len
;
6117 /* live_flags contains the flags needed by the next instructions
6118 in the code. At the end of the bloc, we consider that all the
6120 live_flags
= CC_OSZAPC
;
6121 while (opc_ptr
> opc_buf
) {
6123 /* if none of the flags written by the instruction is used,
6124 then we can try to find a simpler instruction */
6125 write_flags
= opc_write_flags
[op
];
6126 if ((live_flags
& write_flags
) == 0) {
6127 *opc_ptr
= opc_simpler
[op
];
6129 /* compute the live flags before the instruction */
6130 live_flags
&= ~write_flags
;
6131 live_flags
|= opc_read_flags
[op
];
6135 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6136 basic block 'tb'. If search_pc is TRUE, also generate PC
6137 information for each intermediate instruction. */
6138 static inline int gen_intermediate_code_internal(CPUState
*env
,
6139 TranslationBlock
*tb
,
6142 DisasContext dc1
, *dc
= &dc1
;
6143 target_ulong pc_ptr
;
6144 uint16_t *gen_opc_end
;
6145 int flags
, j
, lj
, cflags
;
6146 target_ulong pc_start
;
6147 target_ulong cs_base
;
6149 /* generate intermediate code */
6151 cs_base
= tb
->cs_base
;
6153 cflags
= tb
->cflags
;
6155 dc
->pe
= (flags
>> HF_PE_SHIFT
) & 1;
6156 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
6157 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
6158 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
6160 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
6161 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
6162 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
6163 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
6164 dc
->singlestep_enabled
= env
->singlestep_enabled
;
6165 dc
->cc_op
= CC_OP_DYNAMIC
;
6166 dc
->cs_base
= cs_base
;
6168 dc
->popl_esp_hack
= 0;
6169 /* select memory access functions */
6171 if (flags
& HF_SOFTMMU_MASK
) {
6173 dc
->mem_index
= 2 * 4;
6175 dc
->mem_index
= 1 * 4;
6177 dc
->cpuid_features
= env
->cpuid_features
;
6178 #ifdef TARGET_X86_64
6179 dc
->lma
= (flags
>> HF_LMA_SHIFT
) & 1;
6180 dc
->code64
= (flags
>> HF_CS64_SHIFT
) & 1;
6183 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
||
6184 (flags
& HF_INHIBIT_IRQ_MASK
)
6185 #ifndef CONFIG_SOFTMMU
6186 || (flags
& HF_SOFTMMU_MASK
)
6190 /* check addseg logic */
6191 if (!dc
->addseg
&& (dc
->vm86
|| !dc
->pe
|| !dc
->code32
))
6192 printf("ERROR addseg\n");
6195 gen_opc_ptr
= gen_opc_buf
;
6196 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
6197 gen_opparam_ptr
= gen_opparam_buf
;
6200 dc
->is_jmp
= DISAS_NEXT
;
6205 if (env
->nb_breakpoints
> 0) {
6206 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
6207 if (env
->breakpoints
[j
] == pc_ptr
) {
6208 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
6214 j
= gen_opc_ptr
- gen_opc_buf
;
6218 gen_opc_instr_start
[lj
++] = 0;
6220 gen_opc_pc
[lj
] = pc_ptr
;
6221 gen_opc_cc_op
[lj
] = dc
->cc_op
;
6222 gen_opc_instr_start
[lj
] = 1;
6224 pc_ptr
= disas_insn(dc
, pc_ptr
);
6225 /* stop translation if indicated */
6228 /* if single step mode, we generate only one instruction and
6229 generate an exception */
6230 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6231 the flag and abort the translation to give the irqs a
6232 chance to happen */
6233 if (dc
->tf
|| dc
->singlestep_enabled
||
6234 (flags
& HF_INHIBIT_IRQ_MASK
) ||
6235 (cflags
& CF_SINGLE_INSN
)) {
6236 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6240 /* if too long translation, stop generation too */
6241 if (gen_opc_ptr
>= gen_opc_end
||
6242 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
6243 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6248 *gen_opc_ptr
= INDEX_op_end
;
6249 /* we don't forget to fill the last values */
6251 j
= gen_opc_ptr
- gen_opc_buf
;
6254 gen_opc_instr_start
[lj
++] = 0;
6258 if (loglevel
& CPU_LOG_TB_CPU
) {
6259 cpu_dump_state(env
, logfile
, fprintf
, X86_DUMP_CCOP
);
6261 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
6263 fprintf(logfile
, "----------------\n");
6264 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
6265 #ifdef TARGET_X86_64
6270 disas_flags
= !dc
->code32
;
6271 target_disas(logfile
, pc_start
, pc_ptr
- pc_start
, disas_flags
);
6272 fprintf(logfile
, "\n");
6273 if (loglevel
& CPU_LOG_TB_OP
) {
6274 fprintf(logfile
, "OP:\n");
6275 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6276 fprintf(logfile
, "\n");
6281 /* optimize flag computations */
6282 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
6285 if (loglevel
& CPU_LOG_TB_OP_OPT
) {
6286 fprintf(logfile
, "AFTER FLAGS OPT:\n");
6287 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6288 fprintf(logfile
, "\n");
6292 tb
->size
= pc_ptr
- pc_start
;
6296 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
6298 return gen_intermediate_code_internal(env
, tb
, 0);
6301 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
6303 return gen_intermediate_code_internal(env
, tb
, 1);