4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr
;
34 static uint32_t *gen_opparam_ptr
;
/* Instruction-prefix flags accumulated while decoding one x86 insn.
   Distinct bits so several prefixes can be OR-ed together. */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
/* NOTE(review): name suggests this flags REX-prefix "uniform byte register"
   mode (SPL/BPL/SIL/DIL addressable) — confirm against the decoder, which
   is not visible in this chunk. */
static int x86_64_hregs;
64 #ifdef USE_DIRECT_JUMP
67 #define TBPARAM(x) (long)(x)
70 typedef struct DisasContext
{
71 /* current insn context */
72 int override
; /* -1 if no override */
75 target_ulong pc
; /* pc = eip + cs_base */
76 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base
; /* base of CS segment */
80 int pe
; /* protected mode */
81 int code32
; /* 32 bit code segment */
83 int lma
; /* long mode active */
84 int code64
; /* 64 bit code segment */
87 int ss32
; /* 32 bit stack segment */
88 int cc_op
; /* current CC operation */
89 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st
; /* currently unused */
91 int vm86
; /* vm86 mode */
94 int tf
; /* TF cpu flag */
95 int singlestep_enabled
; /* "hardware" single step enabled */
96 int jmp_opt
; /* use direct block chaining for direct jumps */
97 int mem_index
; /* select memory access functions */
98 int flags
; /* all execution flags */
99 struct TranslationBlock
*tb
;
100 int popl_esp_hack
; /* for correct popl with esp base handling */
101 int rip_offset
; /* only used in x86_64, but left for simplicity */
105 static void gen_eob(DisasContext
*s
);
106 static void gen_jmp(DisasContext
*s
, target_ulong eip
);
107 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
);
109 /* i386 arith/logic operations */
129 OP_SHL1
, /* undocumented */
/* Expand each DEF(name, nargs, copy_size) table entry to its micro-op
   enum index; the extra arguments are ignored here. */
#define DEF(s, n, copy_size) INDEX_op_ ## s,
151 /* I386 int registers */
152 OR_EAX
, /* MUST be even numbered */
161 OR_TMP0
= 16, /* temporary operand register */
163 OR_A0
, /* temporary register used when doing address evaluation */
/* Number of operand sizes handled by the mov/arith dispatch tables:
   byte, word, long, quad (this is the 64-bit-capable branch). */
#define NB_OP_SIZES 4
/* Paste one initializer entry per CPU register, as prefix ## REG ## suffix.
   Sixteen entries: the eight legacy registers in encoding order followed
   by R8..R15 (x86-64 register file). */
#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,\
    prefix ## R8 ## suffix,\
    prefix ## R9 ## suffix,\
    prefix ## R10 ## suffix,\
    prefix ## R11 ## suffix,\
    prefix ## R12 ## suffix,\
    prefix ## R13 ## suffix,\
    prefix ## R14 ## suffix,\
    prefix ## R15 ## suffix,
188 #define DEF_BREGS(prefixb, prefixh, suffix) \
190 static void prefixb ## ESP ## suffix ## _wrapper(void) \
193 prefixb ## ESP ## suffix (); \
195 prefixh ## EAX ## suffix (); \
198 static void prefixb ## EBP ## suffix ## _wrapper(void) \
201 prefixb ## EBP ## suffix (); \
203 prefixh ## ECX ## suffix (); \
206 static void prefixb ## ESI ## suffix ## _wrapper(void) \
209 prefixb ## ESI ## suffix (); \
211 prefixh ## EDX ## suffix (); \
214 static void prefixb ## EDI ## suffix ## _wrapper(void) \
217 prefixb ## EDI ## suffix (); \
219 prefixh ## EBX ## suffix (); \
222 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T0
)
223 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T1
)
224 DEF_BREGS(gen_op_movl_T0_
, gen_op_movh_T0_
, )
225 DEF_BREGS(gen_op_movl_T1_
, gen_op_movh_T1_
, )
227 #else /* !TARGET_X86_64 */
/* Number of operand sizes in the dispatch tables for the 32-bit-only
   build: byte, word, long (no quad). */
#define NB_OP_SIZES 3
/* Paste one initializer entry per CPU register, as prefix ## REG ## suffix.
   Eight entries in i386 encoding order (32-bit-only register file). */
#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,
241 #endif /* !TARGET_X86_64 */
243 static GenOpFunc
*gen_op_mov_reg_T0
[NB_OP_SIZES
][CPU_NB_REGS
] = {
250 gen_op_movb_ESP_T0_wrapper
,
251 gen_op_movb_EBP_T0_wrapper
,
252 gen_op_movb_ESI_T0_wrapper
,
253 gen_op_movb_EDI_T0_wrapper
,
270 DEF_REGS(gen_op_movw_
, _T0
)
273 DEF_REGS(gen_op_movl_
, _T0
)
277 DEF_REGS(gen_op_movq_
, _T0
)
282 static GenOpFunc
*gen_op_mov_reg_T1
[NB_OP_SIZES
][CPU_NB_REGS
] = {
289 gen_op_movb_ESP_T1_wrapper
,
290 gen_op_movb_EBP_T1_wrapper
,
291 gen_op_movb_ESI_T1_wrapper
,
292 gen_op_movb_EDI_T1_wrapper
,
309 DEF_REGS(gen_op_movw_
, _T1
)
312 DEF_REGS(gen_op_movl_
, _T1
)
316 DEF_REGS(gen_op_movq_
, _T1
)
321 static GenOpFunc
*gen_op_mov_reg_A0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
323 DEF_REGS(gen_op_movw_
, _A0
)
326 DEF_REGS(gen_op_movl_
, _A0
)
330 DEF_REGS(gen_op_movq_
, _A0
)
335 static GenOpFunc
*gen_op_mov_TN_reg
[NB_OP_SIZES
][2][CPU_NB_REGS
] =
344 gen_op_movl_T0_ESP_wrapper
,
345 gen_op_movl_T0_EBP_wrapper
,
346 gen_op_movl_T0_ESI_wrapper
,
347 gen_op_movl_T0_EDI_wrapper
,
369 gen_op_movl_T1_ESP_wrapper
,
370 gen_op_movl_T1_EBP_wrapper
,
371 gen_op_movl_T1_ESI_wrapper
,
372 gen_op_movl_T1_EDI_wrapper
,
391 DEF_REGS(gen_op_movl_T0_
, )
394 DEF_REGS(gen_op_movl_T1_
, )
399 DEF_REGS(gen_op_movl_T0_
, )
402 DEF_REGS(gen_op_movl_T1_
, )
408 DEF_REGS(gen_op_movl_T0_
, )
411 DEF_REGS(gen_op_movl_T1_
, )
417 static GenOpFunc
*gen_op_movl_A0_reg
[CPU_NB_REGS
] = {
418 DEF_REGS(gen_op_movl_A0_
, )
421 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][CPU_NB_REGS
] = {
423 DEF_REGS(gen_op_addl_A0_
, )
426 DEF_REGS(gen_op_addl_A0_
, _s1
)
429 DEF_REGS(gen_op_addl_A0_
, _s2
)
432 DEF_REGS(gen_op_addl_A0_
, _s3
)
437 static GenOpFunc
*gen_op_movq_A0_reg
[CPU_NB_REGS
] = {
438 DEF_REGS(gen_op_movq_A0_
, )
441 static GenOpFunc
*gen_op_addq_A0_reg_sN
[4][CPU_NB_REGS
] = {
443 DEF_REGS(gen_op_addq_A0_
, )
446 DEF_REGS(gen_op_addq_A0_
, _s1
)
449 DEF_REGS(gen_op_addq_A0_
, _s2
)
452 DEF_REGS(gen_op_addq_A0_
, _s3
)
457 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
459 DEF_REGS(gen_op_cmovw_
, _T1_T0
)
462 DEF_REGS(gen_op_cmovl_
, _T1_T0
)
466 DEF_REGS(gen_op_cmovq_
, _T1_T0
)
471 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
482 #define DEF_ARITHC(SUFFIX)\
484 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
488 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
492 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
496 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
500 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[4][2] = {
504 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3 * 4][2] = {
506 #ifndef CONFIG_USER_ONLY
512 static const int cc_op_arithb
[8] = {
523 #define DEF_CMPXCHG(SUFFIX)\
524 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[4] = {
533 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3 * 4] = {
535 #ifndef CONFIG_USER_ONLY
541 #define DEF_SHIFT(SUFFIX)\
543 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
553 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
563 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
573 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
583 static GenOpFunc
*gen_op_shift_T0_T1_cc
[4][8] = {
587 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3 * 4][8] = {
589 #ifndef CONFIG_USER_ONLY
595 #define DEF_SHIFTD(SUFFIX, op)\
601 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
609 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
613 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[4][2] = {
617 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[4][2] = {
621 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[3 * 4][2] = {
623 #ifndef CONFIG_USER_ONLY
624 DEF_SHIFTD(_kernel
, im
)
625 DEF_SHIFTD(_user
, im
)
629 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[3 * 4][2] = {
630 DEF_SHIFTD(_raw
, ECX
)
631 #ifndef CONFIG_USER_ONLY
632 DEF_SHIFTD(_kernel
, ECX
)
633 DEF_SHIFTD(_user
, ECX
)
637 static GenOpFunc
*gen_op_btx_T0_T1_cc
[3][4] = {
640 gen_op_btsw_T0_T1_cc
,
641 gen_op_btrw_T0_T1_cc
,
642 gen_op_btcw_T0_T1_cc
,
646 gen_op_btsl_T0_T1_cc
,
647 gen_op_btrl_T0_T1_cc
,
648 gen_op_btcl_T0_T1_cc
,
653 gen_op_btsq_T0_T1_cc
,
654 gen_op_btrq_T0_T1_cc
,
655 gen_op_btcq_T0_T1_cc
,
660 static GenOpFunc
*gen_op_add_bit_A0_T1
[3] = {
661 gen_op_add_bitw_A0_T1
,
662 gen_op_add_bitl_A0_T1
,
663 X86_64_ONLY(gen_op_add_bitq_A0_T1
),
666 static GenOpFunc
*gen_op_bsx_T0_cc
[3][2] = {
683 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 4] = {
684 gen_op_ldsb_raw_T0_A0
,
685 gen_op_ldsw_raw_T0_A0
,
686 X86_64_ONLY(gen_op_ldsl_raw_T0_A0
),
688 #ifndef CONFIG_USER_ONLY
689 gen_op_ldsb_kernel_T0_A0
,
690 gen_op_ldsw_kernel_T0_A0
,
691 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0
),
694 gen_op_ldsb_user_T0_A0
,
695 gen_op_ldsw_user_T0_A0
,
696 X86_64_ONLY(gen_op_ldsl_user_T0_A0
),
701 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 4] = {
702 gen_op_ldub_raw_T0_A0
,
703 gen_op_lduw_raw_T0_A0
,
707 #ifndef CONFIG_USER_ONLY
708 gen_op_ldub_kernel_T0_A0
,
709 gen_op_lduw_kernel_T0_A0
,
713 gen_op_ldub_user_T0_A0
,
714 gen_op_lduw_user_T0_A0
,
720 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
721 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 4] = {
722 gen_op_ldub_raw_T0_A0
,
723 gen_op_lduw_raw_T0_A0
,
724 gen_op_ldl_raw_T0_A0
,
725 X86_64_ONLY(gen_op_ldq_raw_T0_A0
),
727 #ifndef CONFIG_USER_ONLY
728 gen_op_ldub_kernel_T0_A0
,
729 gen_op_lduw_kernel_T0_A0
,
730 gen_op_ldl_kernel_T0_A0
,
731 X86_64_ONLY(gen_op_ldq_kernel_T0_A0
),
733 gen_op_ldub_user_T0_A0
,
734 gen_op_lduw_user_T0_A0
,
735 gen_op_ldl_user_T0_A0
,
736 X86_64_ONLY(gen_op_ldq_user_T0_A0
),
740 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 4] = {
741 gen_op_ldub_raw_T1_A0
,
742 gen_op_lduw_raw_T1_A0
,
743 gen_op_ldl_raw_T1_A0
,
744 X86_64_ONLY(gen_op_ldq_raw_T1_A0
),
746 #ifndef CONFIG_USER_ONLY
747 gen_op_ldub_kernel_T1_A0
,
748 gen_op_lduw_kernel_T1_A0
,
749 gen_op_ldl_kernel_T1_A0
,
750 X86_64_ONLY(gen_op_ldq_kernel_T1_A0
),
752 gen_op_ldub_user_T1_A0
,
753 gen_op_lduw_user_T1_A0
,
754 gen_op_ldl_user_T1_A0
,
755 X86_64_ONLY(gen_op_ldq_user_T1_A0
),
759 static GenOpFunc
*gen_op_st_T0_A0
[3 * 4] = {
760 gen_op_stb_raw_T0_A0
,
761 gen_op_stw_raw_T0_A0
,
762 gen_op_stl_raw_T0_A0
,
763 X86_64_ONLY(gen_op_stq_raw_T0_A0
),
765 #ifndef CONFIG_USER_ONLY
766 gen_op_stb_kernel_T0_A0
,
767 gen_op_stw_kernel_T0_A0
,
768 gen_op_stl_kernel_T0_A0
,
769 X86_64_ONLY(gen_op_stq_kernel_T0_A0
),
771 gen_op_stb_user_T0_A0
,
772 gen_op_stw_user_T0_A0
,
773 gen_op_stl_user_T0_A0
,
774 X86_64_ONLY(gen_op_stq_user_T0_A0
),
778 static GenOpFunc
*gen_op_st_T1_A0
[3 * 4] = {
780 gen_op_stw_raw_T1_A0
,
781 gen_op_stl_raw_T1_A0
,
782 X86_64_ONLY(gen_op_stq_raw_T1_A0
),
784 #ifndef CONFIG_USER_ONLY
786 gen_op_stw_kernel_T1_A0
,
787 gen_op_stl_kernel_T1_A0
,
788 X86_64_ONLY(gen_op_stq_kernel_T1_A0
),
791 gen_op_stw_user_T1_A0
,
792 gen_op_stl_user_T1_A0
,
793 X86_64_ONLY(gen_op_stq_user_T1_A0
),
797 static inline void gen_jmp_im(target_ulong pc
)
800 if (pc
== (uint32_t)pc
) {
801 gen_op_movl_eip_im(pc
);
802 } else if (pc
== (int32_t)pc
) {
803 gen_op_movq_eip_im(pc
);
805 gen_op_movq_eip_im64(pc
>> 32, pc
);
808 gen_op_movl_eip_im(pc
);
812 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
816 override
= s
->override
;
820 gen_op_movq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
821 gen_op_addq_A0_reg_sN
[0][R_ESI
]();
823 gen_op_movq_A0_reg
[R_ESI
]();
829 if (s
->addseg
&& override
< 0)
832 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
833 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
835 gen_op_movl_A0_reg
[R_ESI
]();
838 /* 16 address, always override */
841 gen_op_movl_A0_reg
[R_ESI
]();
842 gen_op_andl_A0_ffff();
843 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
847 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
851 gen_op_movq_A0_reg
[R_EDI
]();
856 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
857 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
859 gen_op_movl_A0_reg
[R_EDI
]();
862 gen_op_movl_A0_reg
[R_EDI
]();
863 gen_op_andl_A0_ffff();
864 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
868 static GenOpFunc
*gen_op_movl_T0_Dshift
[4] = {
869 gen_op_movl_T0_Dshiftb
,
870 gen_op_movl_T0_Dshiftw
,
871 gen_op_movl_T0_Dshiftl
,
872 X86_64_ONLY(gen_op_movl_T0_Dshiftq
),
875 static GenOpFunc1
*gen_op_jnz_ecx
[3] = {
878 X86_64_ONLY(gen_op_jnz_ecxq
),
881 static GenOpFunc1
*gen_op_jz_ecx
[3] = {
884 X86_64_ONLY(gen_op_jz_ecxq
),
887 static GenOpFunc
*gen_op_dec_ECX
[3] = {
890 X86_64_ONLY(gen_op_decq_ECX
),
893 static GenOpFunc1
*gen_op_string_jnz_sub
[2][4] = {
898 X86_64_ONLY(gen_op_jnz_subq
),
904 X86_64_ONLY(gen_op_jz_subq
),
908 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
914 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
920 static GenOpFunc
*gen_op_in
[3] = {
926 static GenOpFunc
*gen_op_out
[3] = {
932 static GenOpFunc
*gen_check_io_T0
[3] = {
938 static GenOpFunc
*gen_check_io_DX
[3] = {
944 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, target_ulong cur_eip
)
946 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
947 if (s
->cc_op
!= CC_OP_DYNAMIC
)
948 gen_op_set_cc_op(s
->cc_op
);
951 gen_check_io_DX
[ot
]();
953 gen_check_io_T0
[ot
]();
/* Emit micro-ops for one MOVS string iteration: load the element at the
   (possibly segment-overridden) source ESI address, store it at ES:EDI,
   then advance ESI and EDI by the Dshift step selected by operand size.
   NOTE(review): extraction dropped interior lines of this function
   (opening brace, #ifdef TARGET_X86_64 / address-size guards, closing
   braces) — comments below are anchored only to the fragments present. */
957 static inline void gen_movs(DisasContext
*s
, int ot
)
/* A0 = source address (segment base + ESI); load element into T0 */
959 gen_string_movl_A0_ESI(s
);
960 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* A0 = destination address (ES:EDI); store T0 there */
961 gen_string_movl_A0_EDI(s
);
962 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = per-size step (presumably +/- element size from EFLAGS.DF) */
963 gen_op_movl_T0_Dshift
[ot
]();
/* 64-bit address size: advance ESI/EDI with quad adds (guards missing) */
966 gen_op_addq_ESI_T0();
967 gen_op_addq_EDI_T0();
/* 32-bit address size */
971 gen_op_addl_ESI_T0();
972 gen_op_addl_EDI_T0();
/* 16-bit address size */
974 gen_op_addw_ESI_T0();
975 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked condition-code state: if cc_op is statically
   known at translation time, emit a micro-op that records it in the CPU
   state and mark it dynamic from here on so it is not flushed twice.
   NOTE(review): the closing braces of this function were lost in
   extraction; the code tokens below are preserved as found. */
979 static inline void gen_update_cc_op(DisasContext
*s
)
981 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
982 gen_op_set_cc_op(s
->cc_op
);
983 s
->cc_op
= CC_OP_DYNAMIC
;
987 /* XXX: does not work with gdbstub "ice" single step - not a
989 static int gen_jz_ecx_string(DisasContext
*s
, target_ulong next_eip
)
993 l1
= gen_new_label();
994 l2
= gen_new_label();
995 gen_op_jnz_ecx
[s
->aflag
](l1
);
997 gen_jmp_tb(s
, next_eip
, 1);
1002 static inline void gen_stos(DisasContext
*s
, int ot
)
1004 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1005 gen_string_movl_A0_EDI(s
);
1006 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1007 gen_op_movl_T0_Dshift
[ot
]();
1008 #ifdef TARGET_X86_64
1009 if (s
->aflag
== 2) {
1010 gen_op_addq_EDI_T0();
1014 gen_op_addl_EDI_T0();
1016 gen_op_addw_EDI_T0();
1020 static inline void gen_lods(DisasContext
*s
, int ot
)
1022 gen_string_movl_A0_ESI(s
);
1023 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1024 gen_op_mov_reg_T0
[ot
][R_EAX
]();
1025 gen_op_movl_T0_Dshift
[ot
]();
1026 #ifdef TARGET_X86_64
1027 if (s
->aflag
== 2) {
1028 gen_op_addq_ESI_T0();
1032 gen_op_addl_ESI_T0();
1034 gen_op_addw_ESI_T0();
1038 static inline void gen_scas(DisasContext
*s
, int ot
)
1040 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1041 gen_string_movl_A0_EDI(s
);
1042 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1043 gen_op_cmpl_T0_T1_cc();
1044 gen_op_movl_T0_Dshift
[ot
]();
1045 #ifdef TARGET_X86_64
1046 if (s
->aflag
== 2) {
1047 gen_op_addq_EDI_T0();
1051 gen_op_addl_EDI_T0();
1053 gen_op_addw_EDI_T0();
1057 static inline void gen_cmps(DisasContext
*s
, int ot
)
1059 gen_string_movl_A0_ESI(s
);
1060 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1061 gen_string_movl_A0_EDI(s
);
1062 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1063 gen_op_cmpl_T0_T1_cc();
1064 gen_op_movl_T0_Dshift
[ot
]();
1065 #ifdef TARGET_X86_64
1066 if (s
->aflag
== 2) {
1067 gen_op_addq_ESI_T0();
1068 gen_op_addq_EDI_T0();
1072 gen_op_addl_ESI_T0();
1073 gen_op_addl_EDI_T0();
1075 gen_op_addw_ESI_T0();
1076 gen_op_addw_EDI_T0();
1080 static inline void gen_ins(DisasContext
*s
, int ot
)
1082 gen_string_movl_A0_EDI(s
);
1084 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1085 gen_op_in_DX_T0
[ot
]();
1086 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1087 gen_op_movl_T0_Dshift
[ot
]();
1088 #ifdef TARGET_X86_64
1089 if (s
->aflag
== 2) {
1090 gen_op_addq_EDI_T0();
1094 gen_op_addl_EDI_T0();
1096 gen_op_addw_EDI_T0();
1100 static inline void gen_outs(DisasContext
*s
, int ot
)
1102 gen_string_movl_A0_ESI(s
);
1103 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1104 gen_op_out_DX_T0
[ot
]();
1105 gen_op_movl_T0_Dshift
[ot
]();
1106 #ifdef TARGET_X86_64
1107 if (s
->aflag
== 2) {
1108 gen_op_addq_ESI_T0();
1112 gen_op_addl_ESI_T0();
1114 gen_op_addw_ESI_T0();
1118 /* same method as Valgrind : we generate jumps to current or next
1120 #define GEN_REPZ(op) \
1121 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1122 target_ulong cur_eip, target_ulong next_eip) \
1125 gen_update_cc_op(s); \
1126 l2 = gen_jz_ecx_string(s, next_eip); \
1127 gen_ ## op(s, ot); \
1128 gen_op_dec_ECX[s->aflag](); \
1129 /* a loop would cause two single step exceptions if ECX = 1 \
1130 before rep string_insn */ \
1132 gen_op_jz_ecx[s->aflag](l2); \
1133 gen_jmp(s, cur_eip); \
1136 #define GEN_REPZ2(op) \
1137 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1138 target_ulong cur_eip, \
1139 target_ulong next_eip, \
1143 gen_update_cc_op(s); \
1144 l2 = gen_jz_ecx_string(s, next_eip); \
1145 gen_ ## op(s, ot); \
1146 gen_op_dec_ECX[s->aflag](); \
1147 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1148 gen_op_string_jnz_sub[nz][ot](l2);\
1150 gen_op_jz_ecx[s->aflag](l2); \
1151 gen_jmp(s, cur_eip); \
1173 static GenOpFunc1
*gen_jcc_sub
[4][8] = {
1204 #ifdef TARGET_X86_64
1207 BUGGY_64(gen_op_jb_subq
),
1209 BUGGY_64(gen_op_jbe_subq
),
1212 BUGGY_64(gen_op_jl_subq
),
1213 BUGGY_64(gen_op_jle_subq
),
1217 static GenOpFunc1
*gen_op_loop
[3][4] = {
1228 #ifdef TARGET_X86_64
1237 static GenOpFunc
*gen_setcc_slow
[8] = {
1248 static GenOpFunc
*gen_setcc_sub
[4][8] = {
1251 gen_op_setb_T0_subb
,
1252 gen_op_setz_T0_subb
,
1253 gen_op_setbe_T0_subb
,
1254 gen_op_sets_T0_subb
,
1256 gen_op_setl_T0_subb
,
1257 gen_op_setle_T0_subb
,
1261 gen_op_setb_T0_subw
,
1262 gen_op_setz_T0_subw
,
1263 gen_op_setbe_T0_subw
,
1264 gen_op_sets_T0_subw
,
1266 gen_op_setl_T0_subw
,
1267 gen_op_setle_T0_subw
,
1271 gen_op_setb_T0_subl
,
1272 gen_op_setz_T0_subl
,
1273 gen_op_setbe_T0_subl
,
1274 gen_op_sets_T0_subl
,
1276 gen_op_setl_T0_subl
,
1277 gen_op_setle_T0_subl
,
1279 #ifdef TARGET_X86_64
1282 gen_op_setb_T0_subq
,
1283 gen_op_setz_T0_subq
,
1284 gen_op_setbe_T0_subq
,
1285 gen_op_sets_T0_subq
,
1287 gen_op_setl_T0_subq
,
1288 gen_op_setle_T0_subq
,
1293 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1294 gen_op_fadd_ST0_FT0
,
1295 gen_op_fmul_ST0_FT0
,
1296 gen_op_fcom_ST0_FT0
,
1297 gen_op_fcom_ST0_FT0
,
1298 gen_op_fsub_ST0_FT0
,
1299 gen_op_fsubr_ST0_FT0
,
1300 gen_op_fdiv_ST0_FT0
,
1301 gen_op_fdivr_ST0_FT0
,
1304 /* NOTE the exception in "r" op ordering */
1305 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1306 gen_op_fadd_STN_ST0
,
1307 gen_op_fmul_STN_ST0
,
1310 gen_op_fsubr_STN_ST0
,
1311 gen_op_fsub_STN_ST0
,
1312 gen_op_fdivr_STN_ST0
,
1313 gen_op_fdiv_STN_ST0
,
1316 /* if d == OR_TMP0, it means memory operand (address in A0) */
1317 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1319 GenOpFunc
*gen_update_cc
;
1322 gen_op_mov_TN_reg
[ot
][0][d
]();
1324 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1329 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1330 gen_op_set_cc_op(s1
->cc_op
);
1332 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1333 gen_op_mov_reg_T0
[ot
][d
]();
1335 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1337 s1
->cc_op
= CC_OP_DYNAMIC
;
1340 gen_op_addl_T0_T1();
1341 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1342 gen_update_cc
= gen_op_update2_cc
;
1345 gen_op_subl_T0_T1();
1346 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1347 gen_update_cc
= gen_op_update2_cc
;
1353 gen_op_arith_T0_T1_cc
[op
]();
1354 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1355 gen_update_cc
= gen_op_update1_cc
;
1358 gen_op_cmpl_T0_T1_cc();
1359 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1360 gen_update_cc
= NULL
;
1363 if (op
!= OP_CMPL
) {
1365 gen_op_mov_reg_T0
[ot
][d
]();
1367 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1369 /* the flags update must happen after the memory write (precise
1370 exception support) */
1376 /* if d == OR_TMP0, it means memory operand (address in A0) */
1377 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1380 gen_op_mov_TN_reg
[ot
][0][d
]();
1382 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1383 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1384 gen_op_set_cc_op(s1
->cc_op
);
1387 s1
->cc_op
= CC_OP_INCB
+ ot
;
1390 s1
->cc_op
= CC_OP_DECB
+ ot
;
1393 gen_op_mov_reg_T0
[ot
][d
]();
1395 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1396 gen_op_update_inc_cc();
1399 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1402 gen_op_mov_TN_reg
[ot
][0][d
]();
1404 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1406 gen_op_mov_TN_reg
[ot
][1][s
]();
1407 /* for zero counts, flags are not updated, so must do it dynamically */
1408 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1409 gen_op_set_cc_op(s1
->cc_op
);
1412 gen_op_shift_T0_T1_cc
[ot
][op
]();
1414 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
1416 gen_op_mov_reg_T0
[ot
][d
]();
1417 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1420 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1422 /* currently not optimized */
1423 gen_op_movl_T1_im(c
);
1424 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1427 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1435 int mod
, rm
, code
, override
, must_add_seg
;
1437 override
= s
->override
;
1438 must_add_seg
= s
->addseg
;
1441 mod
= (modrm
>> 6) & 3;
1453 code
= ldub_code(s
->pc
++);
1454 scale
= (code
>> 6) & 3;
1455 index
= ((code
>> 3) & 7) | REX_X(s
);
1462 if ((base
& 7) == 5) {
1464 disp
= (int32_t)ldl_code(s
->pc
);
1466 if (CODE64(s
) && !havesib
) {
1467 disp
+= s
->pc
+ s
->rip_offset
;
1474 disp
= (int8_t)ldub_code(s
->pc
++);
1478 disp
= ldl_code(s
->pc
);
1484 /* for correct popl handling with esp */
1485 if (base
== 4 && s
->popl_esp_hack
)
1486 disp
+= s
->popl_esp_hack
;
1487 #ifdef TARGET_X86_64
1488 if (s
->aflag
== 2) {
1489 gen_op_movq_A0_reg
[base
]();
1491 if ((int32_t)disp
== disp
)
1492 gen_op_addq_A0_im(disp
);
1494 gen_op_addq_A0_im64(disp
>> 32, disp
);
1499 gen_op_movl_A0_reg
[base
]();
1501 gen_op_addl_A0_im(disp
);
1504 #ifdef TARGET_X86_64
1505 if (s
->aflag
== 2) {
1506 if ((int32_t)disp
== disp
)
1507 gen_op_movq_A0_im(disp
);
1509 gen_op_movq_A0_im64(disp
>> 32, disp
);
1513 gen_op_movl_A0_im(disp
);
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s
->aflag
== 2) {
1520 gen_op_addq_A0_reg_sN
[scale
][index
]();
1524 gen_op_addl_A0_reg_sN
[scale
][index
]();
1529 if (base
== R_EBP
|| base
== R_ESP
)
1534 #ifdef TARGET_X86_64
1535 if (s
->aflag
== 2) {
1536 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1540 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1547 disp
= lduw_code(s
->pc
);
1549 gen_op_movl_A0_im(disp
);
1550 rm
= 0; /* avoid SS override */
1557 disp
= (int8_t)ldub_code(s
->pc
++);
1561 disp
= lduw_code(s
->pc
);
1567 gen_op_movl_A0_reg
[R_EBX
]();
1568 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1571 gen_op_movl_A0_reg
[R_EBX
]();
1572 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1575 gen_op_movl_A0_reg
[R_EBP
]();
1576 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1579 gen_op_movl_A0_reg
[R_EBP
]();
1580 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1583 gen_op_movl_A0_reg
[R_ESI
]();
1586 gen_op_movl_A0_reg
[R_EDI
]();
1589 gen_op_movl_A0_reg
[R_EBP
]();
1593 gen_op_movl_A0_reg
[R_EBX
]();
1597 gen_op_addl_A0_im(disp
);
1598 gen_op_andl_A0_ffff();
1602 if (rm
== 2 || rm
== 3 || rm
== 6)
1607 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1617 /* used for LEA and MOV AX, mem */
1618 static void gen_add_A0_ds_seg(DisasContext
*s
)
1620 int override
, must_add_seg
;
1621 must_add_seg
= s
->addseg
;
1623 if (s
->override
>= 0) {
1624 override
= s
->override
;
1630 #ifdef TARGET_X86_64
1632 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1636 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1641 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1643 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1645 int mod
, rm
, opreg
, disp
;
1647 mod
= (modrm
>> 6) & 3;
1648 rm
= (modrm
& 7) | REX_B(s
);
1652 gen_op_mov_TN_reg
[ot
][0][reg
]();
1653 gen_op_mov_reg_T0
[ot
][rm
]();
1655 gen_op_mov_TN_reg
[ot
][0][rm
]();
1657 gen_op_mov_reg_T0
[ot
][reg
]();
1660 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1663 gen_op_mov_TN_reg
[ot
][0][reg
]();
1664 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1666 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1668 gen_op_mov_reg_T0
[ot
][reg
]();
1673 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1679 ret
= ldub_code(s
->pc
);
1683 ret
= lduw_code(s
->pc
);
1688 ret
= ldl_code(s
->pc
);
1695 static inline int insn_const_size(unsigned int ot
)
1703 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong eip
)
1705 TranslationBlock
*tb
;
1708 pc
= s
->cs_base
+ eip
;
1710 /* NOTE: we handle the case where the TB spans two pages here */
1711 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
1712 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
1713 /* jump to same page: we can use a direct jump */
1715 gen_op_goto_tb0(TBPARAM(tb
));
1717 gen_op_goto_tb1(TBPARAM(tb
));
1719 gen_op_movl_T0_im((long)tb
+ tb_num
);
1722 /* jump to another page: currently not optimized */
1728 static inline void gen_jcc(DisasContext
*s
, int b
,
1729 target_ulong val
, target_ulong next_eip
)
1731 TranslationBlock
*tb
;
1738 jcc_op
= (b
>> 1) & 7;
1742 /* we optimize the cmp/jcc case */
1747 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1750 /* some jumps are easy to compute */
1792 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1795 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1807 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1808 gen_op_set_cc_op(s
->cc_op
);
1809 s
->cc_op
= CC_OP_DYNAMIC
;
1813 gen_setcc_slow
[jcc_op
]();
1814 func
= gen_op_jnz_T0_label
;
1824 l1
= gen_new_label();
1827 gen_goto_tb(s
, 0, next_eip
);
1830 gen_goto_tb(s
, 1, val
);
1835 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1836 gen_op_set_cc_op(s
->cc_op
);
1837 s
->cc_op
= CC_OP_DYNAMIC
;
1839 gen_setcc_slow
[jcc_op
]();
1845 l1
= gen_new_label();
1846 l2
= gen_new_label();
1847 gen_op_jnz_T0_label(l1
);
1848 gen_jmp_im(next_eip
);
1849 gen_op_jmp_label(l2
);
1857 static void gen_setcc(DisasContext
*s
, int b
)
1863 jcc_op
= (b
>> 1) & 7;
1865 /* we optimize the cmp/jcc case */
1870 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1875 /* some jumps are easy to compute */
1902 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1905 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1913 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1914 gen_op_set_cc_op(s
->cc_op
);
1915 func
= gen_setcc_slow
[jcc_op
];
1924 /* move T0 to seg_reg and compute if the CPU state may change. Never
1925 call this function with seg_reg == R_CS */
1926 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
1928 if (s
->pe
&& !s
->vm86
) {
1929 /* XXX: optimize by finding processor state dynamically */
1930 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1931 gen_op_set_cc_op(s
->cc_op
);
1932 gen_jmp_im(cur_eip
);
1933 gen_op_movl_seg_T0(seg_reg
);
1934 /* abort translation because the addseg value may change or
1935 because ss32 may change. For R_SS, translation must always
1936 stop as a special handling must be done to disable hardware
1937 interrupts for the next instruction */
1938 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
1941 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1942 if (seg_reg
== R_SS
)
1947 static inline void gen_stack_update(DisasContext
*s
, int addend
)
1949 #ifdef TARGET_X86_64
1952 gen_op_addq_ESP_8();
1954 gen_op_addq_ESP_im(addend
);
1959 gen_op_addl_ESP_2();
1960 else if (addend
== 4)
1961 gen_op_addl_ESP_4();
1963 gen_op_addl_ESP_im(addend
);
1966 gen_op_addw_ESP_2();
1967 else if (addend
== 4)
1968 gen_op_addw_ESP_4();
1970 gen_op_addw_ESP_im(addend
);
1974 /* generate a push. It depends on ss32, addseg and dflag */
1975 static void gen_push_T0(DisasContext
*s
)
1977 #ifdef TARGET_X86_64
1979 gen_op_movq_A0_reg
[R_ESP
]();
1982 gen_op_st_T0_A0
[OT_QUAD
+ s
->mem_index
]();
1985 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
1987 gen_op_movq_ESP_A0();
1991 gen_op_movl_A0_reg
[R_ESP
]();
1998 gen_op_movl_T1_A0();
1999 gen_op_addl_A0_SS();
2002 gen_op_andl_A0_ffff();
2003 gen_op_movl_T1_A0();
2004 gen_op_addl_A0_SS();
2006 gen_op_st_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2007 if (s
->ss32
&& !s
->addseg
)
2008 gen_op_movl_ESP_A0();
2010 gen_op_mov_reg_T1
[s
->ss32
+ 1][R_ESP
]();
2014 /* generate a push. It depends on ss32, addseg and dflag */
2015 /* slower version for T1, only used for call Ev */
2016 static void gen_push_T1(DisasContext
*s
)
2018 #ifdef TARGET_X86_64
2020 gen_op_movq_A0_reg
[R_ESP
]();
2023 gen_op_st_T1_A0
[OT_QUAD
+ s
->mem_index
]();
2026 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
2028 gen_op_movq_ESP_A0();
2032 gen_op_movl_A0_reg
[R_ESP
]();
2039 gen_op_addl_A0_SS();
2042 gen_op_andl_A0_ffff();
2043 gen_op_addl_A0_SS();
2045 gen_op_st_T1_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2047 if (s
->ss32
&& !s
->addseg
)
2048 gen_op_movl_ESP_A0();
2050 gen_stack_update(s
, (-2) << s
->dflag
);
2054 /* two step pop is necessary for precise exceptions */
2055 static void gen_pop_T0(DisasContext
*s
)
2057 #ifdef TARGET_X86_64
2059 gen_op_movq_A0_reg
[R_ESP
]();
2060 gen_op_ld_T0_A0
[(s
->dflag
? OT_QUAD
: OT_WORD
) + s
->mem_index
]();
2064 gen_op_movl_A0_reg
[R_ESP
]();
2067 gen_op_addl_A0_SS();
2069 gen_op_andl_A0_ffff();
2070 gen_op_addl_A0_SS();
2072 gen_op_ld_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2076 static void gen_pop_update(DisasContext
*s
)
2078 #ifdef TARGET_X86_64
2079 if (CODE64(s
) && s
->dflag
) {
2080 gen_stack_update(s
, 8);
2084 gen_stack_update(s
, 2 << s
->dflag
);
2088 static void gen_stack_A0(DisasContext
*s
)
2090 gen_op_movl_A0_ESP();
2092 gen_op_andl_A0_ffff();
2093 gen_op_movl_T1_A0();
2095 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2098 /* NOTE: wrap around in 16 bit not fully handled */
2099 static void gen_pusha(DisasContext
*s
)
2102 gen_op_movl_A0_ESP();
2103 gen_op_addl_A0_im(-16 << s
->dflag
);
2105 gen_op_andl_A0_ffff();
2106 gen_op_movl_T1_A0();
2108 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2109 for(i
= 0;i
< 8; i
++) {
2110 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
2111 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2112 gen_op_addl_A0_im(2 << s
->dflag
);
2114 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2117 /* NOTE: wrap around in 16 bit not fully handled */
2118 static void gen_popa(DisasContext
*s
)
2121 gen_op_movl_A0_ESP();
2123 gen_op_andl_A0_ffff();
2124 gen_op_movl_T1_A0();
2125 gen_op_addl_T1_im(16 << s
->dflag
);
2127 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2128 for(i
= 0;i
< 8; i
++) {
2129 /* ESP is not reloaded */
2131 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2132 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
2134 gen_op_addl_A0_im(2 << s
->dflag
);
2136 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2139 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2144 #ifdef TARGET_X86_64
2146 ot
= s
->dflag
? OT_QUAD
: OT_WORD
;
2149 gen_op_movl_A0_ESP();
2150 gen_op_addq_A0_im(-opsize
);
2151 gen_op_movl_T1_A0();
2154 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2155 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2157 gen_op_enter64_level(level
, (ot
== OT_QUAD
));
2159 gen_op_mov_reg_T1
[ot
][R_EBP
]();
2160 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2161 gen_op_mov_reg_T1
[OT_QUAD
][R_ESP
]();
2165 ot
= s
->dflag
+ OT_WORD
;
2166 opsize
= 2 << s
->dflag
;
2168 gen_op_movl_A0_ESP();
2169 gen_op_addl_A0_im(-opsize
);
2171 gen_op_andl_A0_ffff();
2172 gen_op_movl_T1_A0();
2174 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2176 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2177 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2179 gen_op_enter_level(level
, s
->dflag
);
2181 gen_op_mov_reg_T1
[ot
][R_EBP
]();
2182 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2183 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2187 static void gen_exception(DisasContext
*s
, int trapno
, target_ulong cur_eip
)
2189 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2190 gen_op_set_cc_op(s
->cc_op
);
2191 gen_jmp_im(cur_eip
);
2192 gen_op_raise_exception(trapno
);
2196 /* an interrupt is different from an exception because of the
2197 priviledge checks */
2198 static void gen_interrupt(DisasContext
*s
, int intno
,
2199 target_ulong cur_eip
, target_ulong next_eip
)
2201 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2202 gen_op_set_cc_op(s
->cc_op
);
2203 gen_jmp_im(cur_eip
);
2204 gen_op_raise_interrupt(intno
, (int)(next_eip
- cur_eip
));
2208 static void gen_debug(DisasContext
*s
, target_ulong cur_eip
)
2210 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2211 gen_op_set_cc_op(s
->cc_op
);
2212 gen_jmp_im(cur_eip
);
2217 /* generate a generic end of block. Trace exception is also generated
2219 static void gen_eob(DisasContext
*s
)
2221 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2222 gen_op_set_cc_op(s
->cc_op
);
2223 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
2224 gen_op_reset_inhibit_irq();
2226 if (s
->singlestep_enabled
) {
2229 gen_op_raise_exception(EXCP01_SSTP
);
2237 /* generate a jump to eip. No segment change must happen before as a
2238 direct call to the next block may occur */
2239 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
2242 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2243 gen_op_set_cc_op(s
->cc_op
);
2244 s
->cc_op
= CC_OP_DYNAMIC
;
2246 gen_goto_tb(s
, tb_num
, eip
);
2254 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2256 gen_jmp_tb(s
, eip
, 0);
2259 static void gen_movtl_T0_im(target_ulong val
)
2261 #ifdef TARGET_X86_64
2262 if ((int32_t)val
== val
) {
2263 gen_op_movl_T0_im(val
);
2265 gen_op_movq_T0_im64(val
>> 32, val
);
2268 gen_op_movl_T0_im(val
);
2272 static void gen_movtl_T1_im(target_ulong val
)
2274 #ifdef TARGET_X86_64
2275 if ((int32_t)val
== val
) {
2276 gen_op_movl_T1_im(val
);
2278 gen_op_movq_T1_im64(val
>> 32, val
);
2281 gen_op_movl_T1_im(val
);
2285 static void gen_add_A0_im(DisasContext
*s
, int val
)
2287 #ifdef TARGET_X86_64
2289 gen_op_addq_A0_im(val
);
2292 gen_op_addl_A0_im(val
);
2295 static GenOpFunc1
*gen_ldq_env_A0
[3] = {
2296 gen_op_ldq_raw_env_A0
,
2297 #ifndef CONFIG_USER_ONLY
2298 gen_op_ldq_kernel_env_A0
,
2299 gen_op_ldq_user_env_A0
,
2303 static GenOpFunc1
*gen_stq_env_A0
[3] = {
2304 gen_op_stq_raw_env_A0
,
2305 #ifndef CONFIG_USER_ONLY
2306 gen_op_stq_kernel_env_A0
,
2307 gen_op_stq_user_env_A0
,
2311 static GenOpFunc1
*gen_ldo_env_A0
[3] = {
2312 gen_op_ldo_raw_env_A0
,
2313 #ifndef CONFIG_USER_ONLY
2314 gen_op_ldo_kernel_env_A0
,
2315 gen_op_ldo_user_env_A0
,
2319 static GenOpFunc1
*gen_sto_env_A0
[3] = {
2320 gen_op_sto_raw_env_A0
,
2321 #ifndef CONFIG_USER_ONLY
2322 gen_op_sto_kernel_env_A0
,
2323 gen_op_sto_user_env_A0
,
2327 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2329 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2330 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2331 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2333 static GenOpFunc2
*sse_op_table1
[256][4] = {
2334 /* pure SSE operations */
2335 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2336 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2337 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2338 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2339 [0x14] = { gen_op_punpckldq_xmm
, gen_op_punpcklqdq_xmm
},
2340 [0x15] = { gen_op_punpckhdq_xmm
, gen_op_punpckhqdq_xmm
},
2341 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2342 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2344 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2345 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2346 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2347 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2348 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2349 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2350 [0x2e] = { gen_op_ucomiss
, gen_op_ucomisd
},
2351 [0x2f] = { gen_op_comiss
, gen_op_comisd
},
2352 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2353 [0x51] = SSE_FOP(sqrt
),
2354 [0x52] = { gen_op_rsqrtps
, NULL
, gen_op_rsqrtss
, NULL
},
2355 [0x53] = { gen_op_rcpps
, NULL
, gen_op_rcpss
, NULL
},
2356 [0x54] = { gen_op_pand_xmm
, gen_op_pand_xmm
}, /* andps, andpd */
2357 [0x55] = { gen_op_pandn_xmm
, gen_op_pandn_xmm
}, /* andnps, andnpd */
2358 [0x56] = { gen_op_por_xmm
, gen_op_por_xmm
}, /* orps, orpd */
2359 [0x57] = { gen_op_pxor_xmm
, gen_op_pxor_xmm
}, /* xorps, xorpd */
2360 [0x58] = SSE_FOP(add
),
2361 [0x59] = SSE_FOP(mul
),
2362 [0x5a] = { gen_op_cvtps2pd
, gen_op_cvtpd2ps
,
2363 gen_op_cvtss2sd
, gen_op_cvtsd2ss
},
2364 [0x5b] = { gen_op_cvtdq2ps
, gen_op_cvtps2dq
, gen_op_cvttps2dq
},
2365 [0x5c] = SSE_FOP(sub
),
2366 [0x5d] = SSE_FOP(min
),
2367 [0x5e] = SSE_FOP(div
),
2368 [0x5f] = SSE_FOP(max
),
2370 [0xc2] = SSE_FOP(cmpeq
),
2371 [0xc6] = { (GenOpFunc2
*)gen_op_shufps
, (GenOpFunc2
*)gen_op_shufpd
},
2373 /* MMX ops and their SSE extensions */
2374 [0x60] = MMX_OP2(punpcklbw
),
2375 [0x61] = MMX_OP2(punpcklwd
),
2376 [0x62] = MMX_OP2(punpckldq
),
2377 [0x63] = MMX_OP2(packsswb
),
2378 [0x64] = MMX_OP2(pcmpgtb
),
2379 [0x65] = MMX_OP2(pcmpgtw
),
2380 [0x66] = MMX_OP2(pcmpgtl
),
2381 [0x67] = MMX_OP2(packuswb
),
2382 [0x68] = MMX_OP2(punpckhbw
),
2383 [0x69] = MMX_OP2(punpckhwd
),
2384 [0x6a] = MMX_OP2(punpckhdq
),
2385 [0x6b] = MMX_OP2(packssdw
),
2386 [0x6c] = { NULL
, gen_op_punpcklqdq_xmm
},
2387 [0x6d] = { NULL
, gen_op_punpckhqdq_xmm
},
2388 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
2389 [0x6f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, , movqdu */
2390 [0x70] = { (GenOpFunc2
*)gen_op_pshufw_mmx
,
2391 (GenOpFunc2
*)gen_op_pshufd_xmm
,
2392 (GenOpFunc2
*)gen_op_pshufhw_xmm
,
2393 (GenOpFunc2
*)gen_op_pshuflw_xmm
},
2394 [0x71] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftw */
2395 [0x72] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftd */
2396 [0x73] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftq */
2397 [0x74] = MMX_OP2(pcmpeqb
),
2398 [0x75] = MMX_OP2(pcmpeqw
),
2399 [0x76] = MMX_OP2(pcmpeql
),
2400 [0x77] = { SSE_SPECIAL
}, /* emms */
2401 [0x7c] = { NULL
, gen_op_haddpd
, NULL
, gen_op_haddps
},
2402 [0x7d] = { NULL
, gen_op_hsubpd
, NULL
, gen_op_hsubps
},
2403 [0x7e] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movd, movd, , movq */
2404 [0x7f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, movdqu */
2405 [0xc4] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pinsrw */
2406 [0xc5] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pextrw */
2407 [0xd0] = { NULL
, gen_op_addsubpd
, NULL
, gen_op_addsubps
},
2408 [0xd1] = MMX_OP2(psrlw
),
2409 [0xd2] = MMX_OP2(psrld
),
2410 [0xd3] = MMX_OP2(psrlq
),
2411 [0xd4] = MMX_OP2(paddq
),
2412 [0xd5] = MMX_OP2(pmullw
),
2413 [0xd6] = { NULL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
},
2414 [0xd7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pmovmskb */
2415 [0xd8] = MMX_OP2(psubusb
),
2416 [0xd9] = MMX_OP2(psubusw
),
2417 [0xda] = MMX_OP2(pminub
),
2418 [0xdb] = MMX_OP2(pand
),
2419 [0xdc] = MMX_OP2(paddusb
),
2420 [0xdd] = MMX_OP2(paddusw
),
2421 [0xde] = MMX_OP2(pmaxub
),
2422 [0xdf] = MMX_OP2(pandn
),
2423 [0xe0] = MMX_OP2(pavgb
),
2424 [0xe1] = MMX_OP2(psraw
),
2425 [0xe2] = MMX_OP2(psrad
),
2426 [0xe3] = MMX_OP2(pavgw
),
2427 [0xe4] = MMX_OP2(pmulhuw
),
2428 [0xe5] = MMX_OP2(pmulhw
),
2429 [0xe6] = { NULL
, gen_op_cvttpd2dq
, gen_op_cvtdq2pd
, gen_op_cvtpd2dq
},
2430 [0xe7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntq, movntq */
2431 [0xe8] = MMX_OP2(psubsb
),
2432 [0xe9] = MMX_OP2(psubsw
),
2433 [0xea] = MMX_OP2(pminsw
),
2434 [0xeb] = MMX_OP2(por
),
2435 [0xec] = MMX_OP2(paddsb
),
2436 [0xed] = MMX_OP2(paddsw
),
2437 [0xee] = MMX_OP2(pmaxsw
),
2438 [0xef] = MMX_OP2(pxor
),
2439 [0xf0] = { NULL
, NULL
, NULL
, SSE_SPECIAL
}, /* lddqu */
2440 [0xf1] = MMX_OP2(psllw
),
2441 [0xf2] = MMX_OP2(pslld
),
2442 [0xf3] = MMX_OP2(psllq
),
2443 [0xf4] = MMX_OP2(pmuludq
),
2444 [0xf5] = MMX_OP2(pmaddwd
),
2445 [0xf6] = MMX_OP2(psadbw
),
2446 [0xf7] = MMX_OP2(maskmov
),
2447 [0xf8] = MMX_OP2(psubb
),
2448 [0xf9] = MMX_OP2(psubw
),
2449 [0xfa] = MMX_OP2(psubl
),
2450 [0xfb] = MMX_OP2(psubq
),
2451 [0xfc] = MMX_OP2(paddb
),
2452 [0xfd] = MMX_OP2(paddw
),
2453 [0xfe] = MMX_OP2(paddl
),
2456 static GenOpFunc2
*sse_op_table2
[3 * 8][2] = {
2457 [0 + 2] = MMX_OP2(psrlw
),
2458 [0 + 4] = MMX_OP2(psraw
),
2459 [0 + 6] = MMX_OP2(psllw
),
2460 [8 + 2] = MMX_OP2(psrld
),
2461 [8 + 4] = MMX_OP2(psrad
),
2462 [8 + 6] = MMX_OP2(pslld
),
2463 [16 + 2] = MMX_OP2(psrlq
),
2464 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2465 [16 + 6] = MMX_OP2(psllq
),
2466 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
2469 static GenOpFunc1
*sse_op_table3
[4 * 3] = {
2472 X86_64_ONLY(gen_op_cvtsq2ss
),
2473 X86_64_ONLY(gen_op_cvtsq2sd
),
2477 X86_64_ONLY(gen_op_cvttss2sq
),
2478 X86_64_ONLY(gen_op_cvttsd2sq
),
2482 X86_64_ONLY(gen_op_cvtss2sq
),
2483 X86_64_ONLY(gen_op_cvtsd2sq
),
2486 static GenOpFunc2
*sse_op_table4
[8][4] = {
2497 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2499 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2500 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2501 GenOpFunc2
*sse_op2
;
2502 GenOpFunc3
*sse_op3
;
2505 if (s
->prefix
& PREFIX_DATA
)
2507 else if (s
->prefix
& PREFIX_REPZ
)
2509 else if (s
->prefix
& PREFIX_REPNZ
)
2513 sse_op2
= sse_op_table1
[b
][b1
];
2516 if (b
<= 0x5f || b
== 0xc6 || b
== 0xc2) {
2526 /* simple MMX/SSE operation */
2527 if (s
->flags
& HF_TS_MASK
) {
2528 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2531 if (s
->flags
& HF_EM_MASK
) {
2533 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2536 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2543 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2544 the static cpu state) */
2549 modrm
= ldub_code(s
->pc
++);
2550 reg
= ((modrm
>> 3) & 7);
2553 mod
= (modrm
>> 6) & 3;
2554 if (sse_op2
== SSE_SPECIAL
) {
2557 case 0x0e7: /* movntq */
2560 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2561 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2563 case 0x1e7: /* movntdq */
2564 case 0x02b: /* movntps */
2565 case 0x12b: /* movntps */
2566 case 0x3f0: /* lddqu */
2569 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2570 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2572 case 0x6e: /* movd mm, ea */
2573 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2574 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2576 case 0x16e: /* movd xmm, ea */
2577 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2578 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2580 case 0x6f: /* movq mm, ea */
2582 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2583 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2586 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
].mmx
),
2587 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2590 case 0x010: /* movups */
2591 case 0x110: /* movupd */
2592 case 0x028: /* movaps */
2593 case 0x128: /* movapd */
2594 case 0x16f: /* movdqa xmm, ea */
2595 case 0x26f: /* movdqu xmm, ea */
2597 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2598 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2600 rm
= (modrm
& 7) | REX_B(s
);
2601 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
2602 offsetof(CPUX86State
,xmm_regs
[rm
]));
2605 case 0x210: /* movss xmm, ea */
2607 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2608 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2609 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2611 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2612 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2613 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2615 rm
= (modrm
& 7) | REX_B(s
);
2616 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2617 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2620 case 0x310: /* movsd xmm, ea */
2622 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2623 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2625 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2626 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2628 rm
= (modrm
& 7) | REX_B(s
);
2629 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2630 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2633 case 0x012: /* movlps */
2634 case 0x112: /* movlpd */
2636 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2637 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2640 rm
= (modrm
& 7) | REX_B(s
);
2641 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2642 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2645 case 0x212: /* movsldup */
2647 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2648 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2650 rm
= (modrm
& 7) | REX_B(s
);
2651 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2652 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2653 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2654 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(2)));
2656 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2657 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2658 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2659 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2661 case 0x312: /* movddup */
2663 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2664 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2666 rm
= (modrm
& 7) | REX_B(s
);
2667 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2668 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2670 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2671 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2673 case 0x016: /* movhps */
2674 case 0x116: /* movhpd */
2676 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2677 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2680 rm
= (modrm
& 7) | REX_B(s
);
2681 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2682 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2685 case 0x216: /* movshdup */
2687 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2688 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2690 rm
= (modrm
& 7) | REX_B(s
);
2691 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2692 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
2693 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2694 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
2696 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2697 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2698 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2699 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2701 case 0x7e: /* movd ea, mm */
2702 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2703 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2705 case 0x17e: /* movd ea, xmm */
2706 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2707 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2709 case 0x27e: /* movq xmm, ea */
2711 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2712 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2714 rm
= (modrm
& 7) | REX_B(s
);
2715 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2716 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2718 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2720 case 0x7f: /* movq ea, mm */
2722 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2723 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2726 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2727 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2730 case 0x011: /* movups */
2731 case 0x111: /* movupd */
2732 case 0x029: /* movaps */
2733 case 0x129: /* movapd */
2734 case 0x17f: /* movdqa ea, xmm */
2735 case 0x27f: /* movdqu ea, xmm */
2737 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2738 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2740 rm
= (modrm
& 7) | REX_B(s
);
2741 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2742 offsetof(CPUX86State
,xmm_regs
[reg
]));
2745 case 0x211: /* movss ea, xmm */
2747 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2748 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2749 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
2751 rm
= (modrm
& 7) | REX_B(s
);
2752 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2753 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2756 case 0x311: /* movsd ea, xmm */
2758 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2759 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2761 rm
= (modrm
& 7) | REX_B(s
);
2762 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2763 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2766 case 0x013: /* movlps */
2767 case 0x113: /* movlpd */
2769 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2770 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2775 case 0x017: /* movhps */
2776 case 0x117: /* movhpd */
2778 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2779 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2784 case 0x71: /* shift mm, im */
2787 case 0x171: /* shift xmm, im */
2790 val
= ldub_code(s
->pc
++);
2792 gen_op_movl_T0_im(val
);
2793 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2795 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2796 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2798 gen_op_movl_T0_im(val
);
2799 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2801 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2802 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2804 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2808 rm
= (modrm
& 7) | REX_B(s
);
2809 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2812 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2814 sse_op2(op2_offset
, op1_offset
);
2816 case 0x050: /* movmskps */
2817 rm
= (modrm
& 7) | REX_B(s
);
2818 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[rm
]));
2819 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2821 case 0x150: /* movmskpd */
2822 rm
= (modrm
& 7) | REX_B(s
);
2823 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[rm
]));
2824 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2826 case 0x02a: /* cvtpi2ps */
2827 case 0x12a: /* cvtpi2pd */
2830 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2831 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2832 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2835 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2837 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2840 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
2844 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
2848 case 0x22a: /* cvtsi2ss */
2849 case 0x32a: /* cvtsi2sd */
2850 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2851 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2852 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2853 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
2855 case 0x02c: /* cvttps2pi */
2856 case 0x12c: /* cvttpd2pi */
2857 case 0x02d: /* cvtps2pi */
2858 case 0x12d: /* cvtpd2pi */
2861 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2862 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2863 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2865 rm
= (modrm
& 7) | REX_B(s
);
2866 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2868 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
2871 gen_op_cvttps2pi(op1_offset
, op2_offset
);
2874 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
2877 gen_op_cvtps2pi(op1_offset
, op2_offset
);
2880 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
2884 case 0x22c: /* cvttss2si */
2885 case 0x32c: /* cvttsd2si */
2886 case 0x22d: /* cvtss2si */
2887 case 0x32d: /* cvtsd2si */
2888 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2890 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2892 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_Q(0)));
2894 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2895 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2897 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2899 rm
= (modrm
& 7) | REX_B(s
);
2900 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2902 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
2903 (b
& 1) * 4](op2_offset
);
2904 gen_op_mov_reg_T0
[ot
][reg
]();
2906 case 0xc4: /* pinsrw */
2909 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2910 val
= ldub_code(s
->pc
++);
2913 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
2916 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
2919 case 0xc5: /* pextrw */
2923 val
= ldub_code(s
->pc
++);
2926 rm
= (modrm
& 7) | REX_B(s
);
2927 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
2931 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
2933 reg
= ((modrm
>> 3) & 7) | rex_r
;
2934 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2936 case 0x1d6: /* movq ea, xmm */
2938 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2939 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2941 rm
= (modrm
& 7) | REX_B(s
);
2942 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2943 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2944 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2947 case 0x2d6: /* movq2dq */
2950 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2951 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2952 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2954 case 0x3d6: /* movdq2q */
2956 rm
= (modrm
& 7) | REX_B(s
);
2957 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
),
2958 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2960 case 0xd7: /* pmovmskb */
2965 rm
= (modrm
& 7) | REX_B(s
);
2966 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
2969 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2971 reg
= ((modrm
>> 3) & 7) | rex_r
;
2972 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2978 /* generic MMX or SSE operation */
2981 /* maskmov : we must prepare A0 */
2984 #ifdef TARGET_X86_64
2985 if (s
->aflag
== 2) {
2986 gen_op_movq_A0_reg
[R_EDI
]();
2990 gen_op_movl_A0_reg
[R_EDI
]();
2992 gen_op_andl_A0_ffff();
2994 gen_add_A0_ds_seg(s
);
2996 case 0x70: /* pshufx insn */
2997 case 0xc6: /* pshufx insn */
2998 case 0xc2: /* compare insns */
3005 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3007 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3008 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3009 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f && b
!= 0x5b) ||
3011 /* specific case for SSE single instructions */
3014 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
3015 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3018 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
3021 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
3024 rm
= (modrm
& 7) | REX_B(s
);
3025 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3028 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3030 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3031 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3032 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
3035 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3039 case 0x70: /* pshufx insn */
3040 case 0xc6: /* pshufx insn */
3041 val
= ldub_code(s
->pc
++);
3042 sse_op3
= (GenOpFunc3
*)sse_op2
;
3043 sse_op3(op1_offset
, op2_offset
, val
);
3047 val
= ldub_code(s
->pc
++);
3050 sse_op2
= sse_op_table4
[val
][b1
];
3051 sse_op2(op1_offset
, op2_offset
);
3054 sse_op2(op1_offset
, op2_offset
);
3057 if (b
== 0x2e || b
== 0x2f) {
3058 s
->cc_op
= CC_OP_EFLAGS
;
3064 /* convert one instruction. s->is_jmp is set if the translation must
3065 be stopped. Return the next pc value */
3066 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
3068 int b
, prefixes
, aflag
, dflag
;
3070 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
3071 target_ulong next_eip
, tval
;
3081 #ifdef TARGET_X86_64
3086 s
->rip_offset
= 0; /* for relative ip address */
3088 b
= ldub_code(s
->pc
);
3090 /* check prefixes */
3091 #ifdef TARGET_X86_64
3095 prefixes
|= PREFIX_REPZ
;
3098 prefixes
|= PREFIX_REPNZ
;
3101 prefixes
|= PREFIX_LOCK
;
3122 prefixes
|= PREFIX_DATA
;
3125 prefixes
|= PREFIX_ADR
;
3129 rex_w
= (b
>> 3) & 1;
3130 rex_r
= (b
& 0x4) << 1;
3131 s
->rex_x
= (b
& 0x2) << 2;
3132 REX_B(s
) = (b
& 0x1) << 3;
3133 x86_64_hregs
= 1; /* select uniform byte register addressing */
3137 /* 0x66 is ignored if rex.w is set */
3140 if (prefixes
& PREFIX_DATA
)
3143 if (!(prefixes
& PREFIX_ADR
))
3150 prefixes
|= PREFIX_REPZ
;
3153 prefixes
|= PREFIX_REPNZ
;
3156 prefixes
|= PREFIX_LOCK
;
3177 prefixes
|= PREFIX_DATA
;
3180 prefixes
|= PREFIX_ADR
;
3183 if (prefixes
& PREFIX_DATA
)
3185 if (prefixes
& PREFIX_ADR
)
3189 s
->prefix
= prefixes
;
3193 /* lock generation */
3194 if (prefixes
& PREFIX_LOCK
)
3197 /* now check op code */
3201 /**************************/
3202 /* extended op code */
3203 b
= ldub_code(s
->pc
++) | 0x100;
3206 /**************************/
3224 ot
= dflag
+ OT_WORD
;
3227 case 0: /* OP Ev, Gv */
3228 modrm
= ldub_code(s
->pc
++);
3229 reg
= ((modrm
>> 3) & 7) | rex_r
;
3230 mod
= (modrm
>> 6) & 3;
3231 rm
= (modrm
& 7) | REX_B(s
);
3233 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3235 } else if (op
== OP_XORL
&& rm
== reg
) {
3237 /* xor reg, reg optimisation */
3239 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3240 gen_op_mov_reg_T0
[ot
][reg
]();
3241 gen_op_update1_cc();
3246 gen_op_mov_TN_reg
[ot
][1][reg
]();
3247 gen_op(s
, op
, ot
, opreg
);
3249 case 1: /* OP Gv, Ev */
3250 modrm
= ldub_code(s
->pc
++);
3251 mod
= (modrm
>> 6) & 3;
3252 reg
= ((modrm
>> 3) & 7) | rex_r
;
3253 rm
= (modrm
& 7) | REX_B(s
);
3255 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3256 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3257 } else if (op
== OP_XORL
&& rm
== reg
) {
3260 gen_op_mov_TN_reg
[ot
][1][rm
]();
3262 gen_op(s
, op
, ot
, reg
);
3264 case 2: /* OP A, Iv */
3265 val
= insn_get(s
, ot
);
3266 gen_op_movl_T1_im(val
);
3267 gen_op(s
, op
, ot
, OR_EAX
);
3273 case 0x80: /* GRP1 */
3283 ot
= dflag
+ OT_WORD
;
3285 modrm
= ldub_code(s
->pc
++);
3286 mod
= (modrm
>> 6) & 3;
3287 rm
= (modrm
& 7) | REX_B(s
);
3288 op
= (modrm
>> 3) & 7;
3294 s
->rip_offset
= insn_const_size(ot
);
3295 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3306 val
= insn_get(s
, ot
);
3309 val
= (int8_t)insn_get(s
, OT_BYTE
);
3312 gen_op_movl_T1_im(val
);
3313 gen_op(s
, op
, ot
, opreg
);
3317 /**************************/
3318 /* inc, dec, and other misc arith */
3319 case 0x40 ... 0x47: /* inc Gv */
3320 ot
= dflag
? OT_LONG
: OT_WORD
;
3321 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3323 case 0x48 ... 0x4f: /* dec Gv */
3324 ot
= dflag
? OT_LONG
: OT_WORD
;
3325 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3327 case 0xf6: /* GRP3 */
3332 ot
= dflag
+ OT_WORD
;
3334 modrm
= ldub_code(s
->pc
++);
3335 mod
= (modrm
>> 6) & 3;
3336 rm
= (modrm
& 7) | REX_B(s
);
3337 op
= (modrm
>> 3) & 7;
3340 s
->rip_offset
= insn_const_size(ot
);
3341 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3342 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3344 gen_op_mov_TN_reg
[ot
][0][rm
]();
3349 val
= insn_get(s
, ot
);
3350 gen_op_movl_T1_im(val
);
3351 gen_op_testl_T0_T1_cc();
3352 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3357 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3359 gen_op_mov_reg_T0
[ot
][rm
]();
3365 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3367 gen_op_mov_reg_T0
[ot
][rm
]();
3369 gen_op_update_neg_cc();
3370 s
->cc_op
= CC_OP_SUBB
+ ot
;
3375 gen_op_mulb_AL_T0();
3376 s
->cc_op
= CC_OP_MULB
;
3379 gen_op_mulw_AX_T0();
3380 s
->cc_op
= CC_OP_MULW
;
3384 gen_op_mull_EAX_T0();
3385 s
->cc_op
= CC_OP_MULL
;
3387 #ifdef TARGET_X86_64
3389 gen_op_mulq_EAX_T0();
3390 s
->cc_op
= CC_OP_MULQ
;
3398 gen_op_imulb_AL_T0();
3399 s
->cc_op
= CC_OP_MULB
;
3402 gen_op_imulw_AX_T0();
3403 s
->cc_op
= CC_OP_MULW
;
3407 gen_op_imull_EAX_T0();
3408 s
->cc_op
= CC_OP_MULL
;
3410 #ifdef TARGET_X86_64
3412 gen_op_imulq_EAX_T0();
3413 s
->cc_op
= CC_OP_MULQ
;
3421 gen_jmp_im(pc_start
- s
->cs_base
);
3422 gen_op_divb_AL_T0();
3425 gen_jmp_im(pc_start
- s
->cs_base
);
3426 gen_op_divw_AX_T0();
3430 gen_jmp_im(pc_start
- s
->cs_base
);
3431 gen_op_divl_EAX_T0();
3433 #ifdef TARGET_X86_64
3435 gen_jmp_im(pc_start
- s
->cs_base
);
3436 gen_op_divq_EAX_T0();
3444 gen_jmp_im(pc_start
- s
->cs_base
);
3445 gen_op_idivb_AL_T0();
3448 gen_jmp_im(pc_start
- s
->cs_base
);
3449 gen_op_idivw_AX_T0();
3453 gen_jmp_im(pc_start
- s
->cs_base
);
3454 gen_op_idivl_EAX_T0();
3456 #ifdef TARGET_X86_64
3458 gen_jmp_im(pc_start
- s
->cs_base
);
3459 gen_op_idivq_EAX_T0();
3469 case 0xfe: /* GRP4 */
3470 case 0xff: /* GRP5 */
3474 ot
= dflag
+ OT_WORD
;
3476 modrm
= ldub_code(s
->pc
++);
3477 mod
= (modrm
>> 6) & 3;
3478 rm
= (modrm
& 7) | REX_B(s
);
3479 op
= (modrm
>> 3) & 7;
3480 if (op
>= 2 && b
== 0xfe) {
3484 if (op
== 2 || op
== 4) {
3485 /* operand size for jumps is 64 bit */
3487 } else if (op
== 3 || op
== 5) {
3488 /* for call calls, the operand is 16 or 32 bit, even
3490 ot
= dflag
? OT_LONG
: OT_WORD
;
3491 } else if (op
== 6) {
3492 /* default push size is 64 bit */
3493 ot
= dflag
? OT_QUAD
: OT_WORD
;
3497 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3498 if (op
>= 2 && op
!= 3 && op
!= 5)
3499 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3501 gen_op_mov_TN_reg
[ot
][0][rm
]();
3505 case 0: /* inc Ev */
3510 gen_inc(s
, ot
, opreg
, 1);
3512 case 1: /* dec Ev */
3517 gen_inc(s
, ot
, opreg
, -1);
3519 case 2: /* call Ev */
3520 /* XXX: optimize if memory (no 'and' is necessary) */
3522 gen_op_andl_T0_ffff();
3523 next_eip
= s
->pc
- s
->cs_base
;
3524 gen_movtl_T1_im(next_eip
);
3529 case 3: /* lcall Ev */
3530 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3531 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3532 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3534 if (s
->pe
&& !s
->vm86
) {
3535 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3536 gen_op_set_cc_op(s
->cc_op
);
3537 gen_jmp_im(pc_start
- s
->cs_base
);
3538 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- pc_start
);
3540 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3544 case 4: /* jmp Ev */
3546 gen_op_andl_T0_ffff();
3550 case 5: /* ljmp Ev */
3551 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3552 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3553 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3555 if (s
->pe
&& !s
->vm86
) {
3556 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3557 gen_op_set_cc_op(s
->cc_op
);
3558 gen_jmp_im(pc_start
- s
->cs_base
);
3559 gen_op_ljmp_protected_T0_T1(s
->pc
- pc_start
);
3561 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3562 gen_op_movl_T0_T1();
3567 case 6: /* push Ev */
3575 case 0x84: /* test Ev, Gv */
3580 ot
= dflag
+ OT_WORD
;
3582 modrm
= ldub_code(s
->pc
++);
3583 mod
= (modrm
>> 6) & 3;
3584 rm
= (modrm
& 7) | REX_B(s
);
3585 reg
= ((modrm
>> 3) & 7) | rex_r
;
3587 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3588 gen_op_mov_TN_reg
[ot
][1][reg
]();
3589 gen_op_testl_T0_T1_cc();
3590 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3593 case 0xa8: /* test eAX, Iv */
3598 ot
= dflag
+ OT_WORD
;
3599 val
= insn_get(s
, ot
);
3601 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
3602 gen_op_movl_T1_im(val
);
3603 gen_op_testl_T0_T1_cc();
3604 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3607 case 0x98: /* CWDE/CBW */
3608 #ifdef TARGET_X86_64
3610 gen_op_movslq_RAX_EAX();
3614 gen_op_movswl_EAX_AX();
3616 gen_op_movsbw_AX_AL();
3618 case 0x99: /* CDQ/CWD */
3619 #ifdef TARGET_X86_64
3621 gen_op_movsqo_RDX_RAX();
3625 gen_op_movslq_EDX_EAX();
3627 gen_op_movswl_DX_AX();
3629 case 0x1af: /* imul Gv, Ev */
3630 case 0x69: /* imul Gv, Ev, I */
3632 ot
= dflag
+ OT_WORD
;
3633 modrm
= ldub_code(s
->pc
++);
3634 reg
= ((modrm
>> 3) & 7) | rex_r
;
3636 s
->rip_offset
= insn_const_size(ot
);
3639 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3641 val
= insn_get(s
, ot
);
3642 gen_op_movl_T1_im(val
);
3643 } else if (b
== 0x6b) {
3644 val
= (int8_t)insn_get(s
, OT_BYTE
);
3645 gen_op_movl_T1_im(val
);
3647 gen_op_mov_TN_reg
[ot
][1][reg
]();
3650 #ifdef TARGET_X86_64
3651 if (ot
== OT_QUAD
) {
3652 gen_op_imulq_T0_T1();
3655 if (ot
== OT_LONG
) {
3656 gen_op_imull_T0_T1();
3658 gen_op_imulw_T0_T1();
3660 gen_op_mov_reg_T0
[ot
][reg
]();
3661 s
->cc_op
= CC_OP_MULB
+ ot
;
3664 case 0x1c1: /* xadd Ev, Gv */
3668 ot
= dflag
+ OT_WORD
;
3669 modrm
= ldub_code(s
->pc
++);
3670 reg
= ((modrm
>> 3) & 7) | rex_r
;
3671 mod
= (modrm
>> 6) & 3;
3673 rm
= (modrm
& 7) | REX_B(s
);
3674 gen_op_mov_TN_reg
[ot
][0][reg
]();
3675 gen_op_mov_TN_reg
[ot
][1][rm
]();
3676 gen_op_addl_T0_T1();
3677 gen_op_mov_reg_T1
[ot
][reg
]();
3678 gen_op_mov_reg_T0
[ot
][rm
]();
3680 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3681 gen_op_mov_TN_reg
[ot
][0][reg
]();
3682 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3683 gen_op_addl_T0_T1();
3684 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3685 gen_op_mov_reg_T1
[ot
][reg
]();
3687 gen_op_update2_cc();
3688 s
->cc_op
= CC_OP_ADDB
+ ot
;
3691 case 0x1b1: /* cmpxchg Ev, Gv */
3695 ot
= dflag
+ OT_WORD
;
3696 modrm
= ldub_code(s
->pc
++);
3697 reg
= ((modrm
>> 3) & 7) | rex_r
;
3698 mod
= (modrm
>> 6) & 3;
3699 gen_op_mov_TN_reg
[ot
][1][reg
]();
3701 rm
= (modrm
& 7) | REX_B(s
);
3702 gen_op_mov_TN_reg
[ot
][0][rm
]();
3703 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3704 gen_op_mov_reg_T0
[ot
][rm
]();
3706 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3707 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3708 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3710 s
->cc_op
= CC_OP_SUBB
+ ot
;
3712 case 0x1c7: /* cmpxchg8b */
3713 modrm
= ldub_code(s
->pc
++);
3714 mod
= (modrm
>> 6) & 3;
3717 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3718 gen_op_set_cc_op(s
->cc_op
);
3719 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3721 s
->cc_op
= CC_OP_EFLAGS
;
3724 /**************************/
3726 case 0x50 ... 0x57: /* push */
3727 gen_op_mov_TN_reg
[OT_LONG
][0][(b
& 7) | REX_B(s
)]();
3730 case 0x58 ... 0x5f: /* pop */
3732 ot
= dflag
? OT_QUAD
: OT_WORD
;
3734 ot
= dflag
+ OT_WORD
;
3737 /* NOTE: order is important for pop %sp */
3739 gen_op_mov_reg_T0
[ot
][(b
& 7) | REX_B(s
)]();
3741 case 0x60: /* pusha */
3746 case 0x61: /* popa */
3751 case 0x68: /* push Iv */
3754 ot
= dflag
? OT_QUAD
: OT_WORD
;
3756 ot
= dflag
+ OT_WORD
;
3759 val
= insn_get(s
, ot
);
3761 val
= (int8_t)insn_get(s
, OT_BYTE
);
3762 gen_op_movl_T0_im(val
);
3765 case 0x8f: /* pop Ev */
3767 ot
= dflag
? OT_QUAD
: OT_WORD
;
3769 ot
= dflag
+ OT_WORD
;
3771 modrm
= ldub_code(s
->pc
++);
3772 mod
= (modrm
>> 6) & 3;
3775 /* NOTE: order is important for pop %sp */
3777 rm
= (modrm
& 7) | REX_B(s
);
3778 gen_op_mov_reg_T0
[ot
][rm
]();
3780 /* NOTE: order is important too for MMU exceptions */
3781 s
->popl_esp_hack
= 1 << ot
;
3782 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3783 s
->popl_esp_hack
= 0;
3787 case 0xc8: /* enter */
3790 val
= lduw_code(s
->pc
);
3792 level
= ldub_code(s
->pc
++);
3793 gen_enter(s
, val
, level
);
3796 case 0xc9: /* leave */
3797 /* XXX: exception not precise (ESP is updated before potential exception) */
3799 gen_op_mov_TN_reg
[OT_QUAD
][0][R_EBP
]();
3800 gen_op_mov_reg_T0
[OT_QUAD
][R_ESP
]();
3801 } else if (s
->ss32
) {
3802 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
3803 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
3805 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
3806 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
3810 ot
= dflag
? OT_QUAD
: OT_WORD
;
3812 ot
= dflag
+ OT_WORD
;
3814 gen_op_mov_reg_T0
[ot
][R_EBP
]();
3817 case 0x06: /* push es */
3818 case 0x0e: /* push cs */
3819 case 0x16: /* push ss */
3820 case 0x1e: /* push ds */
3823 gen_op_movl_T0_seg(b
>> 3);
3826 case 0x1a0: /* push fs */
3827 case 0x1a8: /* push gs */
3828 gen_op_movl_T0_seg((b
>> 3) & 7);
3831 case 0x07: /* pop es */
3832 case 0x17: /* pop ss */
3833 case 0x1f: /* pop ds */
3838 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3841 /* if reg == SS, inhibit interrupts/trace. */
3842 /* If several instructions disable interrupts, only the
3844 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3845 gen_op_set_inhibit_irq();
3849 gen_jmp_im(s
->pc
- s
->cs_base
);
3853 case 0x1a1: /* pop fs */
3854 case 0x1a9: /* pop gs */
3856 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
3859 gen_jmp_im(s
->pc
- s
->cs_base
);
3864 /**************************/
3867 case 0x89: /* mov Gv, Ev */
3871 ot
= dflag
+ OT_WORD
;
3872 modrm
= ldub_code(s
->pc
++);
3873 reg
= ((modrm
>> 3) & 7) | rex_r
;
3875 /* generate a generic store */
3876 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
3879 case 0xc7: /* mov Ev, Iv */
3883 ot
= dflag
+ OT_WORD
;
3884 modrm
= ldub_code(s
->pc
++);
3885 mod
= (modrm
>> 6) & 3;
3887 s
->rip_offset
= insn_const_size(ot
);
3888 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3890 val
= insn_get(s
, ot
);
3891 gen_op_movl_T0_im(val
);
3893 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3895 gen_op_mov_reg_T0
[ot
][(modrm
& 7) | REX_B(s
)]();
3898 case 0x8b: /* mov Ev, Gv */
3902 ot
= OT_WORD
+ dflag
;
3903 modrm
= ldub_code(s
->pc
++);
3904 reg
= ((modrm
>> 3) & 7) | rex_r
;
3906 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3907 gen_op_mov_reg_T0
[ot
][reg
]();
3909 case 0x8e: /* mov seg, Gv */
3910 modrm
= ldub_code(s
->pc
++);
3911 reg
= (modrm
>> 3) & 7;
3912 if (reg
>= 6 || reg
== R_CS
)
3914 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3915 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3917 /* if reg == SS, inhibit interrupts/trace */
3918 /* If several instructions disable interrupts, only the
3920 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3921 gen_op_set_inhibit_irq();
3925 gen_jmp_im(s
->pc
- s
->cs_base
);
3929 case 0x8c: /* mov Gv, seg */
3930 modrm
= ldub_code(s
->pc
++);
3931 reg
= (modrm
>> 3) & 7;
3932 mod
= (modrm
>> 6) & 3;
3935 gen_op_movl_T0_seg(reg
);
3937 ot
= OT_WORD
+ dflag
;
3940 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3943 case 0x1b6: /* movzbS Gv, Eb */
3944 case 0x1b7: /* movzwS Gv, Eb */
3945 case 0x1be: /* movsbS Gv, Eb */
3946 case 0x1bf: /* movswS Gv, Eb */
3949 /* d_ot is the size of destination */
3950 d_ot
= dflag
+ OT_WORD
;
3951 /* ot is the size of source */
3952 ot
= (b
& 1) + OT_BYTE
;
3953 modrm
= ldub_code(s
->pc
++);
3954 reg
= ((modrm
>> 3) & 7) | rex_r
;
3955 mod
= (modrm
>> 6) & 3;
3956 rm
= (modrm
& 7) | REX_B(s
);
3959 gen_op_mov_TN_reg
[ot
][0][rm
]();
3960 switch(ot
| (b
& 8)) {
3962 gen_op_movzbl_T0_T0();
3965 gen_op_movsbl_T0_T0();
3968 gen_op_movzwl_T0_T0();
3972 gen_op_movswl_T0_T0();
3975 gen_op_mov_reg_T0
[d_ot
][reg
]();
3977 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3979 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
3981 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
3983 gen_op_mov_reg_T0
[d_ot
][reg
]();
3988 case 0x8d: /* lea */
3989 ot
= dflag
+ OT_WORD
;
3990 modrm
= ldub_code(s
->pc
++);
3991 mod
= (modrm
>> 6) & 3;
3994 reg
= ((modrm
>> 3) & 7) | rex_r
;
3995 /* we must ensure that no segment is added */
3999 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4001 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
4004 case 0xa0: /* mov EAX, Ov */
4006 case 0xa2: /* mov Ov, EAX */
4009 target_ulong offset_addr
;
4014 ot
= dflag
+ OT_WORD
;
4015 #ifdef TARGET_X86_64
4016 if (s
->aflag
== 2) {
4017 offset_addr
= ldq_code(s
->pc
);
4019 if (offset_addr
== (int32_t)offset_addr
)
4020 gen_op_movq_A0_im(offset_addr
);
4022 gen_op_movq_A0_im64(offset_addr
>> 32, offset_addr
);
4027 offset_addr
= insn_get(s
, OT_LONG
);
4029 offset_addr
= insn_get(s
, OT_WORD
);
4031 gen_op_movl_A0_im(offset_addr
);
4033 gen_add_A0_ds_seg(s
);
4035 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4036 gen_op_mov_reg_T0
[ot
][R_EAX
]();
4038 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
4039 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4043 case 0xd7: /* xlat */
4044 #ifdef TARGET_X86_64
4045 if (s
->aflag
== 2) {
4046 gen_op_movq_A0_reg
[R_EBX
]();
4047 gen_op_addq_A0_AL();
4051 gen_op_movl_A0_reg
[R_EBX
]();
4052 gen_op_addl_A0_AL();
4054 gen_op_andl_A0_ffff();
4056 gen_add_A0_ds_seg(s
);
4057 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
4058 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
4060 case 0xb0 ... 0xb7: /* mov R, Ib */
4061 val
= insn_get(s
, OT_BYTE
);
4062 gen_op_movl_T0_im(val
);
4063 gen_op_mov_reg_T0
[OT_BYTE
][(b
& 7) | REX_B(s
)]();
4065 case 0xb8 ... 0xbf: /* mov R, Iv */
4066 #ifdef TARGET_X86_64
4070 tmp
= ldq_code(s
->pc
);
4072 reg
= (b
& 7) | REX_B(s
);
4073 gen_movtl_T0_im(tmp
);
4074 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
4078 ot
= dflag
? OT_LONG
: OT_WORD
;
4079 val
= insn_get(s
, ot
);
4080 reg
= (b
& 7) | REX_B(s
);
4081 gen_op_movl_T0_im(val
);
4082 gen_op_mov_reg_T0
[ot
][reg
]();
4086 case 0x91 ... 0x97: /* xchg R, EAX */
4087 ot
= dflag
+ OT_WORD
;
4088 reg
= (b
& 7) | REX_B(s
);
4092 case 0x87: /* xchg Ev, Gv */
4096 ot
= dflag
+ OT_WORD
;
4097 modrm
= ldub_code(s
->pc
++);
4098 reg
= ((modrm
>> 3) & 7) | rex_r
;
4099 mod
= (modrm
>> 6) & 3;
4101 rm
= (modrm
& 7) | REX_B(s
);
4103 gen_op_mov_TN_reg
[ot
][0][reg
]();
4104 gen_op_mov_TN_reg
[ot
][1][rm
]();
4105 gen_op_mov_reg_T0
[ot
][rm
]();
4106 gen_op_mov_reg_T1
[ot
][reg
]();
4108 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4109 gen_op_mov_TN_reg
[ot
][0][reg
]();
4110 /* for xchg, lock is implicit */
4111 if (!(prefixes
& PREFIX_LOCK
))
4113 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4114 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4115 if (!(prefixes
& PREFIX_LOCK
))
4117 gen_op_mov_reg_T1
[ot
][reg
]();
4120 case 0xc4: /* les Gv */
4125 case 0xc5: /* lds Gv */
4130 case 0x1b2: /* lss Gv */
4133 case 0x1b4: /* lfs Gv */
4136 case 0x1b5: /* lgs Gv */
4139 ot
= dflag
? OT_LONG
: OT_WORD
;
4140 modrm
= ldub_code(s
->pc
++);
4141 reg
= ((modrm
>> 3) & 7) | rex_r
;
4142 mod
= (modrm
>> 6) & 3;
4145 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4146 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4147 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4148 /* load the segment first to handle exceptions properly */
4149 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
4150 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4151 /* then put the data */
4152 gen_op_mov_reg_T1
[ot
][reg
]();
4154 gen_jmp_im(s
->pc
- s
->cs_base
);
4159 /************************/
4170 ot
= dflag
+ OT_WORD
;
4172 modrm
= ldub_code(s
->pc
++);
4173 mod
= (modrm
>> 6) & 3;
4174 op
= (modrm
>> 3) & 7;
4180 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4183 opreg
= (modrm
& 7) | REX_B(s
);
4188 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4191 shift
= ldub_code(s
->pc
++);
4193 gen_shifti(s
, op
, ot
, opreg
, shift
);
4208 case 0x1a4: /* shld imm */
4212 case 0x1a5: /* shld cl */
4216 case 0x1ac: /* shrd imm */
4220 case 0x1ad: /* shrd cl */
4224 ot
= dflag
+ OT_WORD
;
4225 modrm
= ldub_code(s
->pc
++);
4226 mod
= (modrm
>> 6) & 3;
4227 rm
= (modrm
& 7) | REX_B(s
);
4228 reg
= ((modrm
>> 3) & 7) | rex_r
;
4231 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4232 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4234 gen_op_mov_TN_reg
[ot
][0][rm
]();
4236 gen_op_mov_TN_reg
[ot
][1][reg
]();
4239 val
= ldub_code(s
->pc
++);
4246 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4248 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4249 if (op
== 0 && ot
!= OT_WORD
)
4250 s
->cc_op
= CC_OP_SHLB
+ ot
;
4252 s
->cc_op
= CC_OP_SARB
+ ot
;
4255 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4256 gen_op_set_cc_op(s
->cc_op
);
4258 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4260 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4261 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4264 gen_op_mov_reg_T0
[ot
][rm
]();
4268 /************************/
4271 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4272 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4273 /* XXX: what to do if illegal op ? */
4274 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4277 modrm
= ldub_code(s
->pc
++);
4278 mod
= (modrm
>> 6) & 3;
4280 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4283 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4285 case 0x00 ... 0x07: /* fxxxs */
4286 case 0x10 ... 0x17: /* fixxxl */
4287 case 0x20 ... 0x27: /* fxxxl */
4288 case 0x30 ... 0x37: /* fixxx */
4295 gen_op_flds_FT0_A0();
4298 gen_op_fildl_FT0_A0();
4301 gen_op_fldl_FT0_A0();
4305 gen_op_fild_FT0_A0();
4309 gen_op_fp_arith_ST0_FT0
[op1
]();
4311 /* fcomp needs pop */
4316 case 0x08: /* flds */
4317 case 0x0a: /* fsts */
4318 case 0x0b: /* fstps */
4319 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4320 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4321 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4326 gen_op_flds_ST0_A0();
4329 gen_op_fildl_ST0_A0();
4332 gen_op_fldl_ST0_A0();
4336 gen_op_fild_ST0_A0();
4343 gen_op_fisttl_ST0_A0();
4346 gen_op_fisttll_ST0_A0();
4350 gen_op_fistt_ST0_A0();
4357 gen_op_fsts_ST0_A0();
4360 gen_op_fistl_ST0_A0();
4363 gen_op_fstl_ST0_A0();
4367 gen_op_fist_ST0_A0();
4375 case 0x0c: /* fldenv mem */
4376 gen_op_fldenv_A0(s
->dflag
);
4378 case 0x0d: /* fldcw mem */
4381 case 0x0e: /* fnstenv mem */
4382 gen_op_fnstenv_A0(s
->dflag
);
4384 case 0x0f: /* fnstcw mem */
4387 case 0x1d: /* fldt mem */
4388 gen_op_fldt_ST0_A0();
4390 case 0x1f: /* fstpt mem */
4391 gen_op_fstt_ST0_A0();
4394 case 0x2c: /* frstor mem */
4395 gen_op_frstor_A0(s
->dflag
);
4397 case 0x2e: /* fnsave mem */
4398 gen_op_fnsave_A0(s
->dflag
);
4400 case 0x2f: /* fnstsw mem */
4403 case 0x3c: /* fbld */
4404 gen_op_fbld_ST0_A0();
4406 case 0x3e: /* fbstp */
4407 gen_op_fbst_ST0_A0();
4410 case 0x3d: /* fildll */
4411 gen_op_fildll_ST0_A0();
4413 case 0x3f: /* fistpll */
4414 gen_op_fistll_ST0_A0();
4421 /* register float ops */
4425 case 0x08: /* fld sti */
4427 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4429 case 0x09: /* fxchg sti */
4430 case 0x29: /* fxchg4 sti, undocumented op */
4431 case 0x39: /* fxchg7 sti, undocumented op */
4432 gen_op_fxchg_ST0_STN(opreg
);
4434 case 0x0a: /* grp d9/2 */
4437 /* check exceptions (FreeBSD FPU probe) */
4438 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4439 gen_op_set_cc_op(s
->cc_op
);
4440 gen_jmp_im(pc_start
- s
->cs_base
);
4447 case 0x0c: /* grp d9/4 */
4457 gen_op_fcom_ST0_FT0();
4466 case 0x0d: /* grp d9/5 */
4475 gen_op_fldl2t_ST0();
4479 gen_op_fldl2e_ST0();
4487 gen_op_fldlg2_ST0();
4491 gen_op_fldln2_ST0();
4502 case 0x0e: /* grp d9/6 */
4513 case 3: /* fpatan */
4516 case 4: /* fxtract */
4519 case 5: /* fprem1 */
4522 case 6: /* fdecstp */
4526 case 7: /* fincstp */
4531 case 0x0f: /* grp d9/7 */
4536 case 1: /* fyl2xp1 */
4542 case 3: /* fsincos */
4545 case 5: /* fscale */
4548 case 4: /* frndint */
4560 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4561 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4562 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4568 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4572 gen_op_fmov_FT0_STN(opreg
);
4573 gen_op_fp_arith_ST0_FT0
[op1
]();
4577 case 0x02: /* fcom */
4578 case 0x22: /* fcom2, undocumented op */
4579 gen_op_fmov_FT0_STN(opreg
);
4580 gen_op_fcom_ST0_FT0();
4582 case 0x03: /* fcomp */
4583 case 0x23: /* fcomp3, undocumented op */
4584 case 0x32: /* fcomp5, undocumented op */
4585 gen_op_fmov_FT0_STN(opreg
);
4586 gen_op_fcom_ST0_FT0();
4589 case 0x15: /* da/5 */
4591 case 1: /* fucompp */
4592 gen_op_fmov_FT0_STN(1);
4593 gen_op_fucom_ST0_FT0();
4603 case 0: /* feni (287 only, just do nop here) */
4605 case 1: /* fdisi (287 only, just do nop here) */
4610 case 3: /* fninit */
4613 case 4: /* fsetpm (287 only, just do nop here) */
4619 case 0x1d: /* fucomi */
4620 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4621 gen_op_set_cc_op(s
->cc_op
);
4622 gen_op_fmov_FT0_STN(opreg
);
4623 gen_op_fucomi_ST0_FT0();
4624 s
->cc_op
= CC_OP_EFLAGS
;
4626 case 0x1e: /* fcomi */
4627 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4628 gen_op_set_cc_op(s
->cc_op
);
4629 gen_op_fmov_FT0_STN(opreg
);
4630 gen_op_fcomi_ST0_FT0();
4631 s
->cc_op
= CC_OP_EFLAGS
;
4633 case 0x28: /* ffree sti */
4634 gen_op_ffree_STN(opreg
);
4636 case 0x2a: /* fst sti */
4637 gen_op_fmov_STN_ST0(opreg
);
4639 case 0x2b: /* fstp sti */
4640 case 0x0b: /* fstp1 sti, undocumented op */
4641 case 0x3a: /* fstp8 sti, undocumented op */
4642 case 0x3b: /* fstp9 sti, undocumented op */
4643 gen_op_fmov_STN_ST0(opreg
);
4646 case 0x2c: /* fucom st(i) */
4647 gen_op_fmov_FT0_STN(opreg
);
4648 gen_op_fucom_ST0_FT0();
4650 case 0x2d: /* fucomp st(i) */
4651 gen_op_fmov_FT0_STN(opreg
);
4652 gen_op_fucom_ST0_FT0();
4655 case 0x33: /* de/3 */
4657 case 1: /* fcompp */
4658 gen_op_fmov_FT0_STN(1);
4659 gen_op_fcom_ST0_FT0();
4667 case 0x38: /* ffreep sti, undocumented op */
4668 gen_op_ffree_STN(opreg
);
4671 case 0x3c: /* df/4 */
4674 gen_op_fnstsw_EAX();
4680 case 0x3d: /* fucomip */
4681 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4682 gen_op_set_cc_op(s
->cc_op
);
4683 gen_op_fmov_FT0_STN(opreg
);
4684 gen_op_fucomi_ST0_FT0();
4686 s
->cc_op
= CC_OP_EFLAGS
;
4688 case 0x3e: /* fcomip */
4689 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4690 gen_op_set_cc_op(s
->cc_op
);
4691 gen_op_fmov_FT0_STN(opreg
);
4692 gen_op_fcomi_ST0_FT0();
4694 s
->cc_op
= CC_OP_EFLAGS
;
4696 case 0x10 ... 0x13: /* fcmovxx */
4700 const static uint8_t fcmov_cc
[8] = {
4706 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4708 gen_op_fcmov_ST0_STN_T0(opreg
);
4715 #ifdef USE_CODE_COPY
4716 s
->tb
->cflags
|= CF_TB_FP_USED
;
4719 /************************/
4722 case 0xa4: /* movsS */
4727 ot
= dflag
+ OT_WORD
;
4729 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4730 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4736 case 0xaa: /* stosS */
4741 ot
= dflag
+ OT_WORD
;
4743 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4744 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4749 case 0xac: /* lodsS */
4754 ot
= dflag
+ OT_WORD
;
4755 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4756 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4761 case 0xae: /* scasS */
4766 ot
= dflag
+ OT_WORD
;
4767 if (prefixes
& PREFIX_REPNZ
) {
4768 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4769 } else if (prefixes
& PREFIX_REPZ
) {
4770 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4773 s
->cc_op
= CC_OP_SUBB
+ ot
;
4777 case 0xa6: /* cmpsS */
4782 ot
= dflag
+ OT_WORD
;
4783 if (prefixes
& PREFIX_REPNZ
) {
4784 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4785 } else if (prefixes
& PREFIX_REPZ
) {
4786 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4789 s
->cc_op
= CC_OP_SUBB
+ ot
;
4792 case 0x6c: /* insS */
4797 ot
= dflag
? OT_LONG
: OT_WORD
;
4798 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4799 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4800 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4805 case 0x6e: /* outsS */
4810 ot
= dflag
? OT_LONG
: OT_WORD
;
4811 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4812 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4813 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4819 /************************/
4826 ot
= dflag
? OT_LONG
: OT_WORD
;
4827 val
= ldub_code(s
->pc
++);
4828 gen_op_movl_T0_im(val
);
4829 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4831 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4838 ot
= dflag
? OT_LONG
: OT_WORD
;
4839 val
= ldub_code(s
->pc
++);
4840 gen_op_movl_T0_im(val
);
4841 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4842 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4850 ot
= dflag
? OT_LONG
: OT_WORD
;
4851 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4852 gen_op_andl_T0_ffff();
4853 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4855 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4862 ot
= dflag
? OT_LONG
: OT_WORD
;
4863 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4864 gen_op_andl_T0_ffff();
4865 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4866 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4870 /************************/
4872 case 0xc2: /* ret im */
4873 val
= ldsw_code(s
->pc
);
4876 if (CODE64(s
) && s
->dflag
)
4878 gen_stack_update(s
, val
+ (2 << s
->dflag
));
4880 gen_op_andl_T0_ffff();
4884 case 0xc3: /* ret */
4888 gen_op_andl_T0_ffff();
4892 case 0xca: /* lret im */
4893 val
= ldsw_code(s
->pc
);
4896 if (s
->pe
&& !s
->vm86
) {
4897 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4898 gen_op_set_cc_op(s
->cc_op
);
4899 gen_jmp_im(pc_start
- s
->cs_base
);
4900 gen_op_lret_protected(s
->dflag
, val
);
4904 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4906 gen_op_andl_T0_ffff();
4907 /* NOTE: keeping EIP updated is not a problem in case of
4911 gen_op_addl_A0_im(2 << s
->dflag
);
4912 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4913 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
4914 /* add stack offset */
4915 gen_stack_update(s
, val
+ (4 << s
->dflag
));
4919 case 0xcb: /* lret */
4922 case 0xcf: /* iret */
4925 gen_op_iret_real(s
->dflag
);
4926 s
->cc_op
= CC_OP_EFLAGS
;
4927 } else if (s
->vm86
) {
4929 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4931 gen_op_iret_real(s
->dflag
);
4932 s
->cc_op
= CC_OP_EFLAGS
;
4935 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4936 gen_op_set_cc_op(s
->cc_op
);
4937 gen_jmp_im(pc_start
- s
->cs_base
);
4938 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
4939 s
->cc_op
= CC_OP_EFLAGS
;
4943 case 0xe8: /* call im */
4946 tval
= (int32_t)insn_get(s
, OT_LONG
);
4948 tval
= (int16_t)insn_get(s
, OT_WORD
);
4949 next_eip
= s
->pc
- s
->cs_base
;
4953 gen_movtl_T0_im(next_eip
);
4958 case 0x9a: /* lcall im */
4960 unsigned int selector
, offset
;
4964 ot
= dflag
? OT_LONG
: OT_WORD
;
4965 offset
= insn_get(s
, ot
);
4966 selector
= insn_get(s
, OT_WORD
);
4968 gen_op_movl_T0_im(selector
);
4969 gen_op_movl_T1_imu(offset
);
4972 case 0xe9: /* jmp im */
4974 tval
= (int32_t)insn_get(s
, OT_LONG
);
4976 tval
= (int16_t)insn_get(s
, OT_WORD
);
4977 tval
+= s
->pc
- s
->cs_base
;
4982 case 0xea: /* ljmp im */
4984 unsigned int selector
, offset
;
4988 ot
= dflag
? OT_LONG
: OT_WORD
;
4989 offset
= insn_get(s
, ot
);
4990 selector
= insn_get(s
, OT_WORD
);
4992 gen_op_movl_T0_im(selector
);
4993 gen_op_movl_T1_imu(offset
);
4996 case 0xeb: /* jmp Jb */
4997 tval
= (int8_t)insn_get(s
, OT_BYTE
);
4998 tval
+= s
->pc
- s
->cs_base
;
5003 case 0x70 ... 0x7f: /* jcc Jb */
5004 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5006 case 0x180 ... 0x18f: /* jcc Jv */
5008 tval
= (int32_t)insn_get(s
, OT_LONG
);
5010 tval
= (int16_t)insn_get(s
, OT_WORD
);
5013 next_eip
= s
->pc
- s
->cs_base
;
5017 gen_jcc(s
, b
, tval
, next_eip
);
5020 case 0x190 ... 0x19f: /* setcc Gv */
5021 modrm
= ldub_code(s
->pc
++);
5023 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
5025 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5026 ot
= dflag
+ OT_WORD
;
5027 modrm
= ldub_code(s
->pc
++);
5028 reg
= ((modrm
>> 3) & 7) | rex_r
;
5029 mod
= (modrm
>> 6) & 3;
5032 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5033 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
5035 rm
= (modrm
& 7) | REX_B(s
);
5036 gen_op_mov_TN_reg
[ot
][1][rm
]();
5038 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
5041 /************************/
5043 case 0x9c: /* pushf */
5044 if (s
->vm86
&& s
->iopl
!= 3) {
5045 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5047 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5048 gen_op_set_cc_op(s
->cc_op
);
5049 gen_op_movl_T0_eflags();
5053 case 0x9d: /* popf */
5054 if (s
->vm86
&& s
->iopl
!= 3) {
5055 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5060 gen_op_movl_eflags_T0_cpl0();
5062 gen_op_movw_eflags_T0_cpl0();
5065 if (s
->cpl
<= s
->iopl
) {
5067 gen_op_movl_eflags_T0_io();
5069 gen_op_movw_eflags_T0_io();
5073 gen_op_movl_eflags_T0();
5075 gen_op_movw_eflags_T0();
5080 s
->cc_op
= CC_OP_EFLAGS
;
5081 /* abort translation because TF flag may change */
5082 gen_jmp_im(s
->pc
- s
->cs_base
);
5086 case 0x9e: /* sahf */
5089 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
5090 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5091 gen_op_set_cc_op(s
->cc_op
);
5092 gen_op_movb_eflags_T0();
5093 s
->cc_op
= CC_OP_EFLAGS
;
5095 case 0x9f: /* lahf */
5098 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5099 gen_op_set_cc_op(s
->cc_op
);
5100 gen_op_movl_T0_eflags();
5101 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
5103 case 0xf5: /* cmc */
5104 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5105 gen_op_set_cc_op(s
->cc_op
);
5107 s
->cc_op
= CC_OP_EFLAGS
;
5109 case 0xf8: /* clc */
5110 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5111 gen_op_set_cc_op(s
->cc_op
);
5113 s
->cc_op
= CC_OP_EFLAGS
;
5115 case 0xf9: /* stc */
5116 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5117 gen_op_set_cc_op(s
->cc_op
);
5119 s
->cc_op
= CC_OP_EFLAGS
;
5121 case 0xfc: /* cld */
5124 case 0xfd: /* std */
5128 /************************/
5129 /* bit operations */
5130 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5131 ot
= dflag
+ OT_WORD
;
5132 modrm
= ldub_code(s
->pc
++);
5133 op
= (modrm
>> 3) & 7;
5134 mod
= (modrm
>> 6) & 3;
5135 rm
= (modrm
& 7) | REX_B(s
);
5138 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5139 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5141 gen_op_mov_TN_reg
[ot
][0][rm
]();
5144 val
= ldub_code(s
->pc
++);
5145 gen_op_movl_T1_im(val
);
5149 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5150 s
->cc_op
= CC_OP_SARB
+ ot
;
5153 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5155 gen_op_mov_reg_T0
[ot
][rm
]();
5156 gen_op_update_bt_cc();
5159 case 0x1a3: /* bt Gv, Ev */
5162 case 0x1ab: /* bts */
5165 case 0x1b3: /* btr */
5168 case 0x1bb: /* btc */
5171 ot
= dflag
+ OT_WORD
;
5172 modrm
= ldub_code(s
->pc
++);
5173 reg
= ((modrm
>> 3) & 7) | rex_r
;
5174 mod
= (modrm
>> 6) & 3;
5175 rm
= (modrm
& 7) | REX_B(s
);
5176 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
5178 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5179 /* specific case: we need to add a displacement */
5180 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5181 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5183 gen_op_mov_TN_reg
[ot
][0][rm
]();
5185 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5186 s
->cc_op
= CC_OP_SARB
+ ot
;
5189 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5191 gen_op_mov_reg_T0
[ot
][rm
]();
5192 gen_op_update_bt_cc();
5195 case 0x1bc: /* bsf */
5196 case 0x1bd: /* bsr */
5197 ot
= dflag
+ OT_WORD
;
5198 modrm
= ldub_code(s
->pc
++);
5199 reg
= ((modrm
>> 3) & 7) | rex_r
;
5200 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5201 /* NOTE: in order to handle the 0 case, we must load the
5202 result. It could be optimized with a generated jump */
5203 gen_op_mov_TN_reg
[ot
][1][reg
]();
5204 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5205 gen_op_mov_reg_T1
[ot
][reg
]();
5206 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5208 /************************/
5210 case 0x27: /* daa */
5213 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5214 gen_op_set_cc_op(s
->cc_op
);
5216 s
->cc_op
= CC_OP_EFLAGS
;
5218 case 0x2f: /* das */
5221 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5222 gen_op_set_cc_op(s
->cc_op
);
5224 s
->cc_op
= CC_OP_EFLAGS
;
5226 case 0x37: /* aaa */
5229 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5230 gen_op_set_cc_op(s
->cc_op
);
5232 s
->cc_op
= CC_OP_EFLAGS
;
5234 case 0x3f: /* aas */
5237 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5238 gen_op_set_cc_op(s
->cc_op
);
5240 s
->cc_op
= CC_OP_EFLAGS
;
5242 case 0xd4: /* aam */
5245 val
= ldub_code(s
->pc
++);
5247 s
->cc_op
= CC_OP_LOGICB
;
5249 case 0xd5: /* aad */
5252 val
= ldub_code(s
->pc
++);
5254 s
->cc_op
= CC_OP_LOGICB
;
5256 /************************/
5258 case 0x90: /* nop */
5259 /* XXX: xchg + rex handling */
5260 /* XXX: correct lock test for all insn */
5261 if (prefixes
& PREFIX_LOCK
)
5264 case 0x9b: /* fwait */
5265 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5266 (HF_MP_MASK
| HF_TS_MASK
)) {
5267 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5269 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5270 gen_op_set_cc_op(s
->cc_op
);
5271 gen_jmp_im(pc_start
- s
->cs_base
);
5275 case 0xcc: /* int3 */
5276 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5278 case 0xcd: /* int N */
5279 val
= ldub_code(s
->pc
++);
5280 if (s
->vm86
&& s
->iopl
!= 3) {
5281 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5283 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5286 case 0xce: /* into */
5289 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5290 gen_op_set_cc_op(s
->cc_op
);
5291 gen_jmp_im(pc_start
- s
->cs_base
);
5292 gen_op_into(s
->pc
- pc_start
);
5294 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5296 gen_debug(s
, pc_start
- s
->cs_base
);
5299 tb_flush(cpu_single_env
);
5300 cpu_set_log(CPU_LOG_INT
| CPU_LOG_TB_IN_ASM
);
5303 case 0xfa: /* cli */
5305 if (s
->cpl
<= s
->iopl
) {
5308 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5314 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5318 case 0xfb: /* sti */
5320 if (s
->cpl
<= s
->iopl
) {
5323 /* interruptions are enabled only the first insn after sti */
5324 /* If several instructions disable interrupts, only the
5326 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5327 gen_op_set_inhibit_irq();
5328 /* give a chance to handle pending irqs */
5329 gen_jmp_im(s
->pc
- s
->cs_base
);
5332 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5338 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5342 case 0x62: /* bound */
5345 ot
= dflag
? OT_LONG
: OT_WORD
;
5346 modrm
= ldub_code(s
->pc
++);
5347 reg
= (modrm
>> 3) & 7;
5348 mod
= (modrm
>> 6) & 3;
5351 gen_op_mov_TN_reg
[ot
][0][reg
]();
5352 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5353 gen_jmp_im(pc_start
- s
->cs_base
);
5359 case 0x1c8 ... 0x1cf: /* bswap reg */
5360 reg
= (b
& 7) | REX_B(s
);
5361 #ifdef TARGET_X86_64
5363 gen_op_mov_TN_reg
[OT_QUAD
][0][reg
]();
5365 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
5369 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
5371 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
5374 case 0xd6: /* salc */
5377 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5378 gen_op_set_cc_op(s
->cc_op
);
5381 case 0xe0: /* loopnz */
5382 case 0xe1: /* loopz */
5383 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5384 gen_op_set_cc_op(s
->cc_op
);
5386 case 0xe2: /* loop */
5387 case 0xe3: /* jecxz */
5391 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5392 next_eip
= s
->pc
- s
->cs_base
;
5397 l1
= gen_new_label();
5398 l2
= gen_new_label();
5401 gen_op_jz_ecx
[s
->aflag
](l1
);
5403 gen_op_dec_ECX
[s
->aflag
]();
5406 gen_op_loop
[s
->aflag
][b
](l1
);
5409 gen_jmp_im(next_eip
);
5410 gen_op_jmp_label(l2
);
5417 case 0x130: /* wrmsr */
5418 case 0x132: /* rdmsr */
5420 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5428 case 0x131: /* rdtsc */
5429 gen_jmp_im(pc_start
- s
->cs_base
);
5432 case 0x134: /* sysenter */
5436 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5438 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5439 gen_op_set_cc_op(s
->cc_op
);
5440 s
->cc_op
= CC_OP_DYNAMIC
;
5442 gen_jmp_im(pc_start
- s
->cs_base
);
5447 case 0x135: /* sysexit */
5451 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5453 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5454 gen_op_set_cc_op(s
->cc_op
);
5455 s
->cc_op
= CC_OP_DYNAMIC
;
5457 gen_jmp_im(pc_start
- s
->cs_base
);
5462 #ifdef TARGET_X86_64
5463 case 0x105: /* syscall */
5464 /* XXX: is it usable in real mode ? */
5465 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5466 gen_op_set_cc_op(s
->cc_op
);
5467 s
->cc_op
= CC_OP_DYNAMIC
;
5469 gen_jmp_im(pc_start
- s
->cs_base
);
5470 gen_op_syscall(s
->pc
- pc_start
);
5473 case 0x107: /* sysret */
5475 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5477 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5478 gen_op_set_cc_op(s
->cc_op
);
5479 s
->cc_op
= CC_OP_DYNAMIC
;
5481 gen_jmp_im(pc_start
- s
->cs_base
);
5482 gen_op_sysret(s
->dflag
);
5483 /* condition codes are modified only in long mode */
5485 s
->cc_op
= CC_OP_EFLAGS
;
5490 case 0x1a2: /* cpuid */
5493 case 0xf4: /* hlt */
5495 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5497 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5498 gen_op_set_cc_op(s
->cc_op
);
5499 gen_jmp_im(s
->pc
- s
->cs_base
);
5505 modrm
= ldub_code(s
->pc
++);
5506 mod
= (modrm
>> 6) & 3;
5507 op
= (modrm
>> 3) & 7;
5510 if (!s
->pe
|| s
->vm86
)
5512 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5516 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5519 if (!s
->pe
|| s
->vm86
)
5522 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5524 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5525 gen_jmp_im(pc_start
- s
->cs_base
);
5530 if (!s
->pe
|| s
->vm86
)
5532 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5536 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5539 if (!s
->pe
|| s
->vm86
)
5542 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5544 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5545 gen_jmp_im(pc_start
- s
->cs_base
);
5551 if (!s
->pe
|| s
->vm86
)
5553 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5554 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5555 gen_op_set_cc_op(s
->cc_op
);
5560 s
->cc_op
= CC_OP_EFLAGS
;
5567 modrm
= ldub_code(s
->pc
++);
5568 mod
= (modrm
>> 6) & 3;
5569 op
= (modrm
>> 3) & 7;
5575 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5577 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.limit
));
5579 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.limit
));
5580 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
5581 gen_add_A0_im(s
, 2);
5583 gen_op_movtl_T0_env(offsetof(CPUX86State
,gdt
.base
));
5585 gen_op_movtl_T0_env(offsetof(CPUX86State
,idt
.base
));
5587 gen_op_andl_T0_im(0xffffff);
5588 gen_op_st_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5595 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5597 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5598 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
5599 gen_add_A0_im(s
, 2);
5600 gen_op_ld_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5602 gen_op_andl_T0_im(0xffffff);
5604 gen_op_movtl_env_T0(offsetof(CPUX86State
,gdt
.base
));
5605 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
5607 gen_op_movtl_env_T0(offsetof(CPUX86State
,idt
.base
));
5608 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
5613 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
5614 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
5618 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5620 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5622 gen_jmp_im(s
->pc
- s
->cs_base
);
5626 case 7: /* invlpg */
5628 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5631 #ifdef TARGET_X86_64
5632 if (CODE64(s
) && (modrm
& 7) == 0) {
5634 gen_op_movtl_T0_env(offsetof(CPUX86State
,segs
[R_GS
].base
));
5635 gen_op_movtl_T1_env(offsetof(CPUX86State
,kernelgsbase
));
5636 gen_op_movtl_env_T1(offsetof(CPUX86State
,segs
[R_GS
].base
));
5637 gen_op_movtl_env_T0(offsetof(CPUX86State
,kernelgsbase
));
5644 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5646 gen_jmp_im(s
->pc
- s
->cs_base
);
5655 case 0x108: /* invd */
5656 case 0x109: /* wbinvd */
5658 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5663 case 0x63: /* arpl or movslS (x86_64) */
5664 #ifdef TARGET_X86_64
5667 /* d_ot is the size of destination */
5668 d_ot
= dflag
+ OT_WORD
;
5670 modrm
= ldub_code(s
->pc
++);
5671 reg
= ((modrm
>> 3) & 7) | rex_r
;
5672 mod
= (modrm
>> 6) & 3;
5673 rm
= (modrm
& 7) | REX_B(s
);
5676 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
5678 if (d_ot
== OT_QUAD
)
5679 gen_op_movslq_T0_T0();
5680 gen_op_mov_reg_T0
[d_ot
][reg
]();
5682 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5683 if (d_ot
== OT_QUAD
) {
5684 gen_op_lds_T0_A0
[OT_LONG
+ s
->mem_index
]();
5686 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5688 gen_op_mov_reg_T0
[d_ot
][reg
]();
5693 if (!s
->pe
|| s
->vm86
)
5695 ot
= dflag
? OT_LONG
: OT_WORD
;
5696 modrm
= ldub_code(s
->pc
++);
5697 reg
= (modrm
>> 3) & 7;
5698 mod
= (modrm
>> 6) & 3;
5701 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5702 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5704 gen_op_mov_TN_reg
[ot
][0][rm
]();
5706 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5707 gen_op_set_cc_op(s
->cc_op
);
5709 s
->cc_op
= CC_OP_EFLAGS
;
5711 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5713 gen_op_mov_reg_T0
[ot
][rm
]();
5715 gen_op_arpl_update();
5718 case 0x102: /* lar */
5719 case 0x103: /* lsl */
5720 if (!s
->pe
|| s
->vm86
)
5722 ot
= dflag
? OT_LONG
: OT_WORD
;
5723 modrm
= ldub_code(s
->pc
++);
5724 reg
= ((modrm
>> 3) & 7) | rex_r
;
5725 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5726 gen_op_mov_TN_reg
[ot
][1][reg
]();
5727 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5728 gen_op_set_cc_op(s
->cc_op
);
5733 s
->cc_op
= CC_OP_EFLAGS
;
5734 gen_op_mov_reg_T1
[ot
][reg
]();
5737 modrm
= ldub_code(s
->pc
++);
5738 mod
= (modrm
>> 6) & 3;
5739 op
= (modrm
>> 3) & 7;
5741 case 0: /* prefetchnta */
5742 case 1: /* prefetchnt0 */
5743 case 2: /* prefetchnt0 */
5744 case 3: /* prefetchnt0 */
5747 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5748 /* nothing more to do */
5754 case 0x120: /* mov reg, crN */
5755 case 0x122: /* mov crN, reg */
5757 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5759 modrm
= ldub_code(s
->pc
++);
5760 if ((modrm
& 0xc0) != 0xc0)
5762 rm
= (modrm
& 7) | REX_B(s
);
5763 reg
= ((modrm
>> 3) & 7) | rex_r
;
5775 gen_op_mov_TN_reg
[ot
][0][rm
]();
5776 gen_op_movl_crN_T0(reg
);
5777 gen_jmp_im(s
->pc
- s
->cs_base
);
5780 #if !defined(CONFIG_USER_ONLY)
5782 gen_op_movtl_T0_cr8();
5785 gen_op_movtl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
5786 gen_op_mov_reg_T0
[ot
][rm
]();
5794 case 0x121: /* mov reg, drN */
5795 case 0x123: /* mov drN, reg */
5797 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5799 modrm
= ldub_code(s
->pc
++);
5800 if ((modrm
& 0xc0) != 0xc0)
5802 rm
= (modrm
& 7) | REX_B(s
);
5803 reg
= ((modrm
>> 3) & 7) | rex_r
;
5808 /* XXX: do it dynamically with CR4.DE bit */
5809 if (reg
== 4 || reg
== 5 || reg
>= 8)
5812 gen_op_mov_TN_reg
[ot
][0][rm
]();
5813 gen_op_movl_drN_T0(reg
);
5814 gen_jmp_im(s
->pc
- s
->cs_base
);
5817 gen_op_movtl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
5818 gen_op_mov_reg_T0
[ot
][rm
]();
5822 case 0x106: /* clts */
5824 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5827 /* abort block because static cpu state changed */
5828 gen_jmp_im(s
->pc
- s
->cs_base
);
5832 /* MMX/SSE/SSE2/PNI support */
5833 case 0x1c3: /* MOVNTI reg, mem */
5834 if (!(s
->cpuid_features
& CPUID_SSE2
))
5836 ot
= s
->dflag
== 2 ? OT_QUAD
: OT_LONG
;
5837 modrm
= ldub_code(s
->pc
++);
5838 mod
= (modrm
>> 6) & 3;
5841 reg
= ((modrm
>> 3) & 7) | rex_r
;
5842 /* generate a generic store */
5843 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
5846 modrm
= ldub_code(s
->pc
++);
5847 mod
= (modrm
>> 6) & 3;
5848 op
= (modrm
>> 3) & 7;
5850 case 0: /* fxsave */
5851 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
) ||
5852 (s
->flags
& HF_EM_MASK
))
5854 if (s
->flags
& HF_TS_MASK
) {
5855 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5858 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5859 gen_op_fxsave_A0((s
->dflag
== 2));
5861 case 1: /* fxrstor */
5862 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
) ||
5863 (s
->flags
& HF_EM_MASK
))
5865 if (s
->flags
& HF_TS_MASK
) {
5866 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5869 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5870 gen_op_fxrstor_A0((s
->dflag
== 2));
5872 case 2: /* ldmxcsr */
5873 case 3: /* stmxcsr */
5874 if (s
->flags
& HF_TS_MASK
) {
5875 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5878 if ((s
->flags
& HF_EM_MASK
) || !(s
->flags
& HF_OSFXSR_MASK
) ||
5881 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5883 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5884 gen_op_movl_env_T0(offsetof(CPUX86State
, mxcsr
));
5886 gen_op_movl_T0_env(offsetof(CPUX86State
, mxcsr
));
5887 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
5890 case 5: /* lfence */
5891 case 6: /* mfence */
5892 if ((modrm
& 0xc7) != 0xc0 || !(s
->cpuid_features
& CPUID_SSE
))
5895 case 7: /* sfence / clflush */
5896 if ((modrm
& 0xc7) == 0xc0) {
5898 if (!(s
->cpuid_features
& CPUID_SSE
))
5902 if (!(s
->cpuid_features
& CPUID_CLFLUSH
))
5904 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5911 case 0x10d: /* prefetch */
5912 modrm
= ldub_code(s
->pc
++);
5913 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5914 /* ignore for now */
5916 case 0x110 ... 0x117:
5917 case 0x128 ... 0x12f:
5918 case 0x150 ... 0x177:
5919 case 0x17c ... 0x17f:
5921 case 0x1c4 ... 0x1c6:
5922 case 0x1d0 ... 0x1fe:
5923 gen_sse(s
, b
, pc_start
, rex_r
);
5928 /* lock generation */
5929 if (s
->prefix
& PREFIX_LOCK
)
5933 if (s
->prefix
& PREFIX_LOCK
)
5935 /* XXX: ensure that no lock was generated */
5936 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
5940 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
5941 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
5943 /* flags read by an operation */
5944 static uint16_t opc_read_flags
[NB_OPS
] = {
5945 [INDEX_op_aas
] = CC_A
,
5946 [INDEX_op_aaa
] = CC_A
,
5947 [INDEX_op_das
] = CC_A
| CC_C
,
5948 [INDEX_op_daa
] = CC_A
| CC_C
,
5950 /* subtle: due to the incl/decl implementation, C is used */
5951 [INDEX_op_update_inc_cc
] = CC_C
,
5953 [INDEX_op_into
] = CC_O
,
5955 [INDEX_op_jb_subb
] = CC_C
,
5956 [INDEX_op_jb_subw
] = CC_C
,
5957 [INDEX_op_jb_subl
] = CC_C
,
5959 [INDEX_op_jz_subb
] = CC_Z
,
5960 [INDEX_op_jz_subw
] = CC_Z
,
5961 [INDEX_op_jz_subl
] = CC_Z
,
5963 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
5964 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
5965 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
5967 [INDEX_op_js_subb
] = CC_S
,
5968 [INDEX_op_js_subw
] = CC_S
,
5969 [INDEX_op_js_subl
] = CC_S
,
5971 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
5972 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
5973 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
5975 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
5976 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
5977 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
5979 [INDEX_op_loopnzw
] = CC_Z
,
5980 [INDEX_op_loopnzl
] = CC_Z
,
5981 [INDEX_op_loopzw
] = CC_Z
,
5982 [INDEX_op_loopzl
] = CC_Z
,
5984 [INDEX_op_seto_T0_cc
] = CC_O
,
5985 [INDEX_op_setb_T0_cc
] = CC_C
,
5986 [INDEX_op_setz_T0_cc
] = CC_Z
,
5987 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
5988 [INDEX_op_sets_T0_cc
] = CC_S
,
5989 [INDEX_op_setp_T0_cc
] = CC_P
,
5990 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
5991 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
5993 [INDEX_op_setb_T0_subb
] = CC_C
,
5994 [INDEX_op_setb_T0_subw
] = CC_C
,
5995 [INDEX_op_setb_T0_subl
] = CC_C
,
5997 [INDEX_op_setz_T0_subb
] = CC_Z
,
5998 [INDEX_op_setz_T0_subw
] = CC_Z
,
5999 [INDEX_op_setz_T0_subl
] = CC_Z
,
6001 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
6002 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
6003 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
6005 [INDEX_op_sets_T0_subb
] = CC_S
,
6006 [INDEX_op_sets_T0_subw
] = CC_S
,
6007 [INDEX_op_sets_T0_subl
] = CC_S
,
6009 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
6010 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
6011 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
6013 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
6014 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
6015 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
6017 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
6018 [INDEX_op_cmc
] = CC_C
,
6019 [INDEX_op_salc
] = CC_C
,
6021 /* needed for correct flag optimisation before string ops */
6022 [INDEX_op_jnz_ecxw
] = CC_OSZAPC
,
6023 [INDEX_op_jnz_ecxl
] = CC_OSZAPC
,
6024 [INDEX_op_jz_ecxw
] = CC_OSZAPC
,
6025 [INDEX_op_jz_ecxl
] = CC_OSZAPC
,
6027 #ifdef TARGET_X86_64
6028 [INDEX_op_jb_subq
] = CC_C
,
6029 [INDEX_op_jz_subq
] = CC_Z
,
6030 [INDEX_op_jbe_subq
] = CC_Z
| CC_C
,
6031 [INDEX_op_js_subq
] = CC_S
,
6032 [INDEX_op_jl_subq
] = CC_O
| CC_S
,
6033 [INDEX_op_jle_subq
] = CC_O
| CC_S
| CC_Z
,
6035 [INDEX_op_loopnzq
] = CC_Z
,
6036 [INDEX_op_loopzq
] = CC_Z
,
6038 [INDEX_op_setb_T0_subq
] = CC_C
,
6039 [INDEX_op_setz_T0_subq
] = CC_Z
,
6040 [INDEX_op_setbe_T0_subq
] = CC_Z
| CC_C
,
6041 [INDEX_op_sets_T0_subq
] = CC_S
,
6042 [INDEX_op_setl_T0_subq
] = CC_O
| CC_S
,
6043 [INDEX_op_setle_T0_subq
] = CC_O
| CC_S
| CC_Z
,
6045 [INDEX_op_jnz_ecxq
] = CC_OSZAPC
,
6046 [INDEX_op_jz_ecxq
] = CC_OSZAPC
,
6049 #define DEF_READF(SUFFIX)\
6050 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6051 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6052 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6053 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6054 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6055 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6056 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6057 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6059 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6060 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6061 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6062 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6063 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6064 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6065 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6066 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6070 #ifndef CONFIG_USER_ONLY
6076 /* flags written by an operation */
6077 static uint16_t opc_write_flags
[NB_OPS
] = {
6078 [INDEX_op_update2_cc
] = CC_OSZAPC
,
6079 [INDEX_op_update1_cc
] = CC_OSZAPC
,
6080 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
6081 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
6082 /* subtle: due to the incl/decl implementation, C is used */
6083 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
6084 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
6086 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
6087 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
6088 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
6089 X86_64_DEF([INDEX_op_mulq_EAX_T0
] = CC_OSZAPC
,)
6090 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
6091 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
6092 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
6093 X86_64_DEF([INDEX_op_imulq_EAX_T0
] = CC_OSZAPC
,)
6094 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
6095 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
6096 X86_64_DEF([INDEX_op_imulq_T0_T1
] = CC_OSZAPC
,)
6099 [INDEX_op_ucomiss
] = CC_OSZAPC
,
6100 [INDEX_op_ucomisd
] = CC_OSZAPC
,
6101 [INDEX_op_comiss
] = CC_OSZAPC
,
6102 [INDEX_op_comisd
] = CC_OSZAPC
,
6105 [INDEX_op_aam
] = CC_OSZAPC
,
6106 [INDEX_op_aad
] = CC_OSZAPC
,
6107 [INDEX_op_aas
] = CC_OSZAPC
,
6108 [INDEX_op_aaa
] = CC_OSZAPC
,
6109 [INDEX_op_das
] = CC_OSZAPC
,
6110 [INDEX_op_daa
] = CC_OSZAPC
,
6112 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
6113 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
6114 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
6115 [INDEX_op_movw_eflags_T0_io
] = CC_OSZAPC
,
6116 [INDEX_op_movl_eflags_T0_io
] = CC_OSZAPC
,
6117 [INDEX_op_movw_eflags_T0_cpl0
] = CC_OSZAPC
,
6118 [INDEX_op_movl_eflags_T0_cpl0
] = CC_OSZAPC
,
6119 [INDEX_op_clc
] = CC_C
,
6120 [INDEX_op_stc
] = CC_C
,
6121 [INDEX_op_cmc
] = CC_C
,
6123 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
6124 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
6125 X86_64_DEF([INDEX_op_btq_T0_T1_cc
] = CC_OSZAPC
,)
6126 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
6127 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
6128 X86_64_DEF([INDEX_op_btsq_T0_T1_cc
] = CC_OSZAPC
,)
6129 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
6130 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
6131 X86_64_DEF([INDEX_op_btrq_T0_T1_cc
] = CC_OSZAPC
,)
6132 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
6133 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
6134 X86_64_DEF([INDEX_op_btcq_T0_T1_cc
] = CC_OSZAPC
,)
6136 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
6137 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
6138 X86_64_DEF([INDEX_op_bsfq_T0_cc
] = CC_OSZAPC
,)
6139 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
6140 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
6141 X86_64_DEF([INDEX_op_bsrq_T0_cc
] = CC_OSZAPC
,)
6143 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
6144 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
6145 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
6146 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc
] = CC_OSZAPC
,)
6148 [INDEX_op_cmpxchg8b
] = CC_Z
,
6149 [INDEX_op_lar
] = CC_Z
,
6150 [INDEX_op_lsl
] = CC_Z
,
6151 [INDEX_op_verr
] = CC_Z
,
6152 [INDEX_op_verw
] = CC_Z
,
6153 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
6154 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
6156 #define DEF_WRITEF(SUFFIX)\
6157 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6158 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6159 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6160 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6161 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6162 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6163 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6164 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6166 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6167 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6168 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6169 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6170 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6171 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6172 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6173 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6175 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6176 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6177 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6178 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6179 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6180 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6181 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6182 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6184 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6185 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6186 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6187 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6189 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6190 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6191 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6192 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6194 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6195 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6196 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6197 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6199 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6200 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6201 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6202 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6203 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6204 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6206 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6207 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6208 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6209 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6210 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6211 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6213 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6214 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6215 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6216 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6221 #ifndef CONFIG_USER_ONLY
6227 /* simpler form of an operation if no flags need to be generated */
6228 static uint16_t opc_simpler
[NB_OPS
] = {
6229 [INDEX_op_update2_cc
] = INDEX_op_nop
,
6230 [INDEX_op_update1_cc
] = INDEX_op_nop
,
6231 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
6233 /* broken: CC_OP logic must be rewritten */
6234 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
6237 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
6238 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
6239 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
6240 X86_64_DEF([INDEX_op_shlq_T0_T1_cc
] = INDEX_op_shlq_T0_T1
,)
6242 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
6243 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
6244 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
6245 X86_64_DEF([INDEX_op_shrq_T0_T1_cc
] = INDEX_op_shrq_T0_T1
,)
6247 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
6248 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
6249 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
6250 X86_64_DEF([INDEX_op_sarq_T0_T1_cc
] = INDEX_op_sarq_T0_T1
,)
6252 #define DEF_SIMPLER(SUFFIX)\
6253 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6254 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6255 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6256 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6258 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6259 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6260 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6261 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6265 #ifndef CONFIG_USER_ONLY
6266 DEF_SIMPLER(_kernel
)
/* One-time initialization of the opc_simpler[] table used by
   optimize_flags(): every micro-op whose entry was left at 0 (i.e. no
   cheaper flag-less replacement was registered in the designated
   initializers above) gets a default value -- presumably the op index
   itself, so optimize_flags() can rewrite unconditionally; the
   assignment line was dropped by the extraction, TODO confirm against
   the complete source.
   NOTE(review): the opening brace, the declaration of 'i', the if-body
   statement and the closing braces fall on lines missing from this
   extraction. */
6271 void optimize_flags_init(void)
6274 /* put default values in arrays */
6275 for(i
= 0; i
< NB_OPS
; i
++) {
6276 if (opc_simpler
[i
] == 0)
6281 /* CPU flags computation optimization: we move backward thru the
6282 generated code to see which flags are needed. The operation is
6283 modified if suitable */
/* CPU flags computation optimization: scan the generated micro-op
   buffer BACKWARDS, tracking which EFLAGS bits are still live (needed
   by a later op).  When an op writes only dead flags, it is replaced
   in place by its flag-less variant from opc_simpler[].
   NOTE(review): several interior lines were dropped by the extraction
   (the opening brace, the declaration of opc_ptr, the statement that
   fetches the current op at the top of the loop, and the closing
   braces) -- verify against the complete source file. */
6284 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
6287 int live_flags
, write_flags
, op
;
/* start one past the last generated op and walk toward the start */
6289 opc_ptr
= opc_buf
+ opc_buf_len
;
6290 /* live_flags contains the flags needed by the next instructions
   in the code.  At the end of the block we conservatively assume
   that all the flags are live. */
6293 live_flags
= CC_OSZAPC
;
6294 while (opc_ptr
> opc_buf
) {
6296 /* if none of the flags written by the instruction is used,
6297 then we can try to find a simpler instruction */
6298 write_flags
= opc_write_flags
[op
];
6299 if ((live_flags
& write_flags
) == 0) {
6300 *opc_ptr
= opc_simpler
[op
];
6302 /* compute the live flags before the instruction */
6303 live_flags
&= ~write_flags
;
6304 live_flags
|= opc_read_flags
[op
];
6308 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6309 basic block 'tb'. If search_pc is TRUE, also generate PC
6310 information for each intermediate instruction. */
6311 static inline int gen_intermediate_code_internal(CPUState
*env
,
6312 TranslationBlock
*tb
,
6315 DisasContext dc1
, *dc
= &dc1
;
6316 target_ulong pc_ptr
;
6317 uint16_t *gen_opc_end
;
6318 int flags
, j
, lj
, cflags
;
6319 target_ulong pc_start
;
6320 target_ulong cs_base
;
6322 /* generate intermediate code */
6324 cs_base
= tb
->cs_base
;
6326 cflags
= tb
->cflags
;
6328 dc
->pe
= (flags
>> HF_PE_SHIFT
) & 1;
6329 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
6330 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
6331 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
6333 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
6334 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
6335 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
6336 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
6337 dc
->singlestep_enabled
= env
->singlestep_enabled
;
6338 dc
->cc_op
= CC_OP_DYNAMIC
;
6339 dc
->cs_base
= cs_base
;
6341 dc
->popl_esp_hack
= 0;
6342 /* select memory access functions */
6344 if (flags
& HF_SOFTMMU_MASK
) {
6346 dc
->mem_index
= 2 * 4;
6348 dc
->mem_index
= 1 * 4;
6350 dc
->cpuid_features
= env
->cpuid_features
;
6351 #ifdef TARGET_X86_64
6352 dc
->lma
= (flags
>> HF_LMA_SHIFT
) & 1;
6353 dc
->code64
= (flags
>> HF_CS64_SHIFT
) & 1;
6356 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
||
6357 (flags
& HF_INHIBIT_IRQ_MASK
)
6358 #ifndef CONFIG_SOFTMMU
6359 || (flags
& HF_SOFTMMU_MASK
)
6363 /* check addseg logic */
6364 if (!dc
->addseg
&& (dc
->vm86
|| !dc
->pe
|| !dc
->code32
))
6365 printf("ERROR addseg\n");
6368 gen_opc_ptr
= gen_opc_buf
;
6369 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
6370 gen_opparam_ptr
= gen_opparam_buf
;
6373 dc
->is_jmp
= DISAS_NEXT
;
6378 if (env
->nb_breakpoints
> 0) {
6379 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
6380 if (env
->breakpoints
[j
] == pc_ptr
) {
6381 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
6387 j
= gen_opc_ptr
- gen_opc_buf
;
6391 gen_opc_instr_start
[lj
++] = 0;
6393 gen_opc_pc
[lj
] = pc_ptr
;
6394 gen_opc_cc_op
[lj
] = dc
->cc_op
;
6395 gen_opc_instr_start
[lj
] = 1;
6397 pc_ptr
= disas_insn(dc
, pc_ptr
);
6398 /* stop translation if indicated */
6401 /* if single step mode, we generate only one instruction and
6402 generate an exception */
6403 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6404 the flag and abort the translation to give the irqs a
6405 change to be happen */
6406 if (dc
->tf
|| dc
->singlestep_enabled
||
6407 (flags
& HF_INHIBIT_IRQ_MASK
) ||
6408 (cflags
& CF_SINGLE_INSN
)) {
6409 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6413 /* if too long translation, stop generation too */
6414 if (gen_opc_ptr
>= gen_opc_end
||
6415 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
6416 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6421 *gen_opc_ptr
= INDEX_op_end
;
6422 /* we don't forget to fill the last values */
6424 j
= gen_opc_ptr
- gen_opc_buf
;
6427 gen_opc_instr_start
[lj
++] = 0;
6431 if (loglevel
& CPU_LOG_TB_CPU
) {
6432 cpu_dump_state(env
, logfile
, fprintf
, X86_DUMP_CCOP
);
6434 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
6436 fprintf(logfile
, "----------------\n");
6437 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
6438 #ifdef TARGET_X86_64
6443 disas_flags
= !dc
->code32
;
6444 target_disas(logfile
, pc_start
, pc_ptr
- pc_start
, disas_flags
);
6445 fprintf(logfile
, "\n");
6446 if (loglevel
& CPU_LOG_TB_OP
) {
6447 fprintf(logfile
, "OP:\n");
6448 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6449 fprintf(logfile
, "\n");
6454 /* optimize flag computations */
6455 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
6458 if (loglevel
& CPU_LOG_TB_OP_OPT
) {
6459 fprintf(logfile
, "AFTER FLAGS OPT:\n");
6460 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6461 fprintf(logfile
, "\n");
6465 tb
->size
= pc_ptr
- pc_start
;
6469 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
6471 return gen_intermediate_code_internal(env
, tb
, 0);
6474 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
6476 return gen_intermediate_code_internal(env
, tb
, 1);