4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
33 /* XXX: move that elsewhere */
/* Write cursors for the dyngen output streams: next free slot in the
   micro-op opcode buffer and in its parameter buffer.
   NOTE(review): extraction artifact — statements are split across lines
   and prefixed with original line numbers; tokens kept byte-identical. */
34 static uint16_t *gen_opc_ptr
;
35 static uint32_t *gen_opparam_ptr
;
/* x86 instruction prefix bits accumulated while decoding one insn
   (tested later via `prefixes & PREFIX_...`). */
37 #define PREFIX_REPZ 0x01
38 #define PREFIX_REPNZ 0x02
39 #define PREFIX_LOCK 0x04
40 #define PREFIX_DATA 0x08
41 #define PREFIX_ADR 0x10
/* Decoder state shared by all gen_* helpers while translating one
   basic block of guest x86 code.
   NOTE(review): the extraction dropped several original lines (numbering
   jumps 45->48, 59->62, 67->end); fields referenced by later code
   (aflag, dflag, cpl, iopl, prefix) and the closing of the struct are
   among the missing lines — do not treat this span as complete. */
43 typedef struct DisasContext
{
44 /* current insn context */
45 int override
; /* -1 if no override */
48 uint8_t *pc
; /* pc = eip + cs_base */
49 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
50 static state change (stop translation) */
51 /* current block context */
52 uint8_t *cs_base
; /* base of CS segment */
53 int pe
; /* protected mode */
54 int code32
; /* 32 bit code segment */
55 int ss32
; /* 32 bit stack segment */
56 int cc_op
; /* current CC operation */
57 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
58 int f_st
; /* currently unused */
59 int vm86
; /* vm86 mode */
62 int tf
; /* TF cpu flag */
63 int singlestep_enabled
; /* "hardware" single step enabled */
64 int jmp_opt
; /* use direct block chaining for direct jumps */
65 int mem_index
; /* select memory access functions */
66 struct TranslationBlock
*tb
; /* translation block being generated */
67 int popl_esp_hack
; /* for correct popl with esp base handling */
/* Forward declarations: end-of-block epilogue and direct jump emitter,
   both defined later in this file and used by the GEN_REPZ macros. */
70 static void gen_eob(DisasContext
*s
);
71 static void gen_jmp(DisasContext
*s
, unsigned int eip
);
73 /* i386 arith/logic operations */
93 OP_SHL1
, /* undocumented */
98 #define DEF(s, n, copy_size) INDEX_op_ ## s,
115 /* I386 int registers */
116 OR_EAX
, /* MUST be even numbered */
124 OR_TMP0
, /* temporary operand register */
126 OR_A0
, /* temporary register used when doing address evaluation */
127 OR_ZERO
, /* fixed zero register */
131 static GenOpFunc
*gen_op_mov_reg_T0
[3][8] = {
164 static GenOpFunc
*gen_op_mov_reg_T1
[3][8] = {
197 static GenOpFunc
*gen_op_mov_reg_A0
[2][8] = {
220 static GenOpFunc
*gen_op_mov_TN_reg
[3][2][8] =
290 static GenOpFunc
*gen_op_movl_A0_reg
[8] = {
301 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][8] = {
313 gen_op_addl_A0_EAX_s1
,
314 gen_op_addl_A0_ECX_s1
,
315 gen_op_addl_A0_EDX_s1
,
316 gen_op_addl_A0_EBX_s1
,
317 gen_op_addl_A0_ESP_s1
,
318 gen_op_addl_A0_EBP_s1
,
319 gen_op_addl_A0_ESI_s1
,
320 gen_op_addl_A0_EDI_s1
,
323 gen_op_addl_A0_EAX_s2
,
324 gen_op_addl_A0_ECX_s2
,
325 gen_op_addl_A0_EDX_s2
,
326 gen_op_addl_A0_EBX_s2
,
327 gen_op_addl_A0_ESP_s2
,
328 gen_op_addl_A0_EBP_s2
,
329 gen_op_addl_A0_ESI_s2
,
330 gen_op_addl_A0_EDI_s2
,
333 gen_op_addl_A0_EAX_s3
,
334 gen_op_addl_A0_ECX_s3
,
335 gen_op_addl_A0_EDX_s3
,
336 gen_op_addl_A0_EBX_s3
,
337 gen_op_addl_A0_ESP_s3
,
338 gen_op_addl_A0_EBP_s3
,
339 gen_op_addl_A0_ESI_s3
,
340 gen_op_addl_A0_EDI_s3
,
344 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[2][8] = {
346 gen_op_cmovw_EAX_T1_T0
,
347 gen_op_cmovw_ECX_T1_T0
,
348 gen_op_cmovw_EDX_T1_T0
,
349 gen_op_cmovw_EBX_T1_T0
,
350 gen_op_cmovw_ESP_T1_T0
,
351 gen_op_cmovw_EBP_T1_T0
,
352 gen_op_cmovw_ESI_T1_T0
,
353 gen_op_cmovw_EDI_T1_T0
,
356 gen_op_cmovl_EAX_T1_T0
,
357 gen_op_cmovl_ECX_T1_T0
,
358 gen_op_cmovl_EDX_T1_T0
,
359 gen_op_cmovl_EBX_T1_T0
,
360 gen_op_cmovl_ESP_T1_T0
,
361 gen_op_cmovl_EBP_T1_T0
,
362 gen_op_cmovl_ESI_T1_T0
,
363 gen_op_cmovl_EDI_T1_T0
,
367 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
378 #define DEF_ARITHC(SUFFIX)\
380 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
381 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
384 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
385 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
388 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
389 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
392 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[3][2] = {
396 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[9][2] = {
398 #ifndef CONFIG_USER_ONLY
404 static const int cc_op_arithb
[8] = {
415 #define DEF_CMPXCHG(SUFFIX)\
416 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
417 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
418 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,
421 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[3] = {
425 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[9] = {
427 #ifndef CONFIG_USER_ONLY
433 #define DEF_SHIFT(SUFFIX)\
435 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
436 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
437 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
438 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
439 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
440 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
441 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
442 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
445 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
446 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
447 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
448 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
449 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
450 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
451 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
452 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
455 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
456 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
457 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
458 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
459 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
460 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
461 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
462 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
465 static GenOpFunc
*gen_op_shift_T0_T1_cc
[3][8] = {
469 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[9][8] = {
471 #ifndef CONFIG_USER_ONLY
477 #define DEF_SHIFTD(SUFFIX, op)\
483 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
484 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
487 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
488 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
492 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[3][2] = {
496 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[3][2] = {
500 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[9][2] = {
502 #ifndef CONFIG_USER_ONLY
503 DEF_SHIFTD(_kernel
, im
)
504 DEF_SHIFTD(_user
, im
)
508 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[9][2] = {
509 DEF_SHIFTD(_raw
, ECX
)
510 #ifndef CONFIG_USER_ONLY
511 DEF_SHIFTD(_kernel
, ECX
)
512 DEF_SHIFTD(_user
, ECX
)
516 static GenOpFunc
*gen_op_btx_T0_T1_cc
[2][4] = {
519 gen_op_btsw_T0_T1_cc
,
520 gen_op_btrw_T0_T1_cc
,
521 gen_op_btcw_T0_T1_cc
,
525 gen_op_btsl_T0_T1_cc
,
526 gen_op_btrl_T0_T1_cc
,
527 gen_op_btcl_T0_T1_cc
,
531 static GenOpFunc
*gen_op_bsx_T0_cc
[2][2] = {
542 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 3] = {
543 gen_op_ldsb_raw_T0_A0
,
544 gen_op_ldsw_raw_T0_A0
,
546 #ifndef CONFIG_USER_ONLY
547 gen_op_ldsb_kernel_T0_A0
,
548 gen_op_ldsw_kernel_T0_A0
,
551 gen_op_ldsb_user_T0_A0
,
552 gen_op_ldsw_user_T0_A0
,
557 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 3] = {
558 gen_op_ldub_raw_T0_A0
,
559 gen_op_lduw_raw_T0_A0
,
562 #ifndef CONFIG_USER_ONLY
563 gen_op_ldub_kernel_T0_A0
,
564 gen_op_lduw_kernel_T0_A0
,
567 gen_op_ldub_user_T0_A0
,
568 gen_op_lduw_user_T0_A0
,
573 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
574 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 3] = {
575 gen_op_ldub_raw_T0_A0
,
576 gen_op_lduw_raw_T0_A0
,
577 gen_op_ldl_raw_T0_A0
,
579 #ifndef CONFIG_USER_ONLY
580 gen_op_ldub_kernel_T0_A0
,
581 gen_op_lduw_kernel_T0_A0
,
582 gen_op_ldl_kernel_T0_A0
,
584 gen_op_ldub_user_T0_A0
,
585 gen_op_lduw_user_T0_A0
,
586 gen_op_ldl_user_T0_A0
,
590 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 3] = {
591 gen_op_ldub_raw_T1_A0
,
592 gen_op_lduw_raw_T1_A0
,
593 gen_op_ldl_raw_T1_A0
,
595 #ifndef CONFIG_USER_ONLY
596 gen_op_ldub_kernel_T1_A0
,
597 gen_op_lduw_kernel_T1_A0
,
598 gen_op_ldl_kernel_T1_A0
,
600 gen_op_ldub_user_T1_A0
,
601 gen_op_lduw_user_T1_A0
,
602 gen_op_ldl_user_T1_A0
,
606 static GenOpFunc
*gen_op_st_T0_A0
[3 * 3] = {
607 gen_op_stb_raw_T0_A0
,
608 gen_op_stw_raw_T0_A0
,
609 gen_op_stl_raw_T0_A0
,
611 #ifndef CONFIG_USER_ONLY
612 gen_op_stb_kernel_T0_A0
,
613 gen_op_stw_kernel_T0_A0
,
614 gen_op_stl_kernel_T0_A0
,
616 gen_op_stb_user_T0_A0
,
617 gen_op_stw_user_T0_A0
,
618 gen_op_stl_user_T0_A0
,
622 static GenOpFunc
*gen_op_st_T1_A0
[3 * 3] = {
624 gen_op_stw_raw_T1_A0
,
625 gen_op_stl_raw_T1_A0
,
627 #ifndef CONFIG_USER_ONLY
629 gen_op_stw_kernel_T1_A0
,
630 gen_op_stl_kernel_T1_A0
,
633 gen_op_stw_user_T1_A0
,
634 gen_op_stl_user_T1_A0
,
/* Load A0 with the effective source address of a string insn:
   ESI plus the (possibly overridden) segment base. In the 16-bit
   address path ESI is masked to 16 bits and the segment base is
   always added.
   NOTE(review): extraction dropped lines (639-641, 643-644, 646-647,
   650, 652-653, 655-656, 660+): braces, the override-defaulting logic
   and the aflag branch structure are missing from this span. */
638 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
642 override
= s
->override
;
645 if (s
->addseg
&& override
< 0)
648 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
649 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
651 gen_op_movl_A0_reg
[R_ESI
]();
654 /* 16 address, always override */
657 gen_op_movl_A0_reg
[R_ESI
]();
658 gen_op_andl_A0_ffff();
659 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
/* Load A0 with the string-destination address: EDI relative to ES
   (destination of string ops always uses ES, never overridable).
   16-bit path masks EDI to 16 bits before adding the ES base.
   NOTE(review): braces and the aflag/addseg branch lines were dropped
   by the extraction (numbering jumps 663->667, 668->670, 670->673). */
663 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
667 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
668 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
670 gen_op_movl_A0_reg
[R_EDI
]();
673 gen_op_movl_A0_reg
[R_EDI
]();
674 gen_op_andl_A0_ffff();
675 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
679 static GenOpFunc
*gen_op_movl_T0_Dshift
[3] = {
680 gen_op_movl_T0_Dshiftb
,
681 gen_op_movl_T0_Dshiftw
,
682 gen_op_movl_T0_Dshiftl
,
685 static GenOpFunc2
*gen_op_jz_ecx
[2] = {
690 static GenOpFunc1
*gen_op_jz_ecx_im
[2] = {
695 static GenOpFunc
*gen_op_dec_ECX
[2] = {
700 static GenOpFunc1
*gen_op_string_jnz_sub
[2][3] = {
702 gen_op_string_jnz_subb
,
703 gen_op_string_jnz_subw
,
704 gen_op_string_jnz_subl
,
707 gen_op_string_jz_subb
,
708 gen_op_string_jz_subw
,
709 gen_op_string_jz_subl
,
713 static GenOpFunc1
*gen_op_string_jnz_sub_im
[2][3] = {
715 gen_op_string_jnz_subb_im
,
716 gen_op_string_jnz_subw_im
,
717 gen_op_string_jnz_subl_im
,
720 gen_op_string_jz_subb_im
,
721 gen_op_string_jz_subw_im
,
722 gen_op_string_jz_subl_im
,
726 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
732 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
738 static GenOpFunc
*gen_op_in
[3] = {
744 static GenOpFunc
*gen_op_out
[3] = {
750 static GenOpFunc
*gen_check_io_T0
[3] = {
756 static GenOpFunc
*gen_check_io_DX
[3] = {
/* Emit an I/O permission check (TSS bitmap) before in/out when CPL is
   insufficient (CPL > IOPL) or in vm86 mode. Flags state is
   materialized and EIP stored first so the check can fault precisely.
   NOTE(review): the use_dx selection lines (768/770) are missing from
   this extraction — presumably `if (use_dx) ... else ...` picks
   between the DX-indexed and T0-indexed check tables; verify against
   the original source. */
762 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, int cur_eip
)
764 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
765 if (s
->cc_op
!= CC_OP_DYNAMIC
)
766 gen_op_set_cc_op(s
->cc_op
);
767 gen_op_jmp_im(cur_eip
);
769 gen_check_io_DX
[ot
]();
771 gen_check_io_T0
[ot
]();
/* Emit micro-ops for one MOVS iteration: load from [seg:ESI] into T0,
   store to [ES:EDI], then advance ESI/EDI by the DF-dependent delta
   (Dshift). addl vs addw forms update 32- vs 16-bit index registers.
   NOTE(review): braces and the aflag branch line (782/785) selecting
   addl vs addw are missing from this extraction. */
775 static inline void gen_movs(DisasContext
*s
, int ot
)
777 gen_string_movl_A0_ESI(s
);
778 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
779 gen_string_movl_A0_EDI(s
);
780 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
781 gen_op_movl_T0_Dshift
[ot
]();
783 gen_op_addl_ESI_T0();
784 gen_op_addl_EDI_T0();
786 gen_op_addw_ESI_T0();
787 gen_op_addw_EDI_T0();
/* Flush the lazily-tracked condition-code state: if cc_op is statically
   known, emit a set_cc_op micro-op and mark the decoder state dynamic
   so later code re-reads flags from the CPU state. */
791 static inline void gen_update_cc_op(DisasContext
*s
)
793 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
794 gen_op_set_cc_op(s
->cc_op
);
795 s
->cc_op
= CC_OP_DYNAMIC
;
/* Emit a "jump to next_eip if ECX == 0" test used by REP-prefixed
   string ops; indexed by aflag (16- vs 32-bit ECX). One form chains
   directly to the TB, the other jumps via an immediate EIP.
   NOTE(review): the condition (line 801/805, likely jmp_opt) choosing
   between the two forms is missing from this extraction. */
799 static inline void gen_jz_ecx_string(DisasContext
*s
, unsigned int next_eip
)
802 gen_op_jz_ecx
[s
->aflag
]((long)s
->tb
, next_eip
);
804 /* XXX: does not work with gdbstub "ice" single step - not a
806 gen_op_jz_ecx_im
[s
->aflag
](next_eip
);
/* Emit one STOS iteration: T0 = EAX, store to [ES:EDI], advance EDI by
   the DF-dependent Dshift delta.
   NOTE(review): braces and the aflag branch line selecting addl vs
   addw (816/818) are missing from this extraction. */
810 static inline void gen_stos(DisasContext
*s
, int ot
)
812 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
813 gen_string_movl_A0_EDI(s
);
814 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
815 gen_op_movl_T0_Dshift
[ot
]();
817 gen_op_addl_EDI_T0();
819 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load [seg:ESI] into T0, write it to EAX
   (width ot), advance ESI by the Dshift delta.
   NOTE(review): braces and the aflag branch line (829/831) are missing
   from this extraction. */
823 static inline void gen_lods(DisasContext
*s
, int ot
)
825 gen_string_movl_A0_ESI(s
);
826 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
827 gen_op_mov_reg_T0
[ot
][R_EAX
]();
828 gen_op_movl_T0_Dshift
[ot
]();
830 gen_op_addl_ESI_T0();
832 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: T0 = EAX, T1 = [ES:EDI], compare (sets CC
   via cmpl micro-op), advance EDI by the Dshift delta.
   NOTE(review): braces and the aflag branch line (843/845) are missing
   from this extraction. */
836 static inline void gen_scas(DisasContext
*s
, int ot
)
838 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
839 gen_string_movl_A0_EDI(s
);
840 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
841 gen_op_cmpl_T0_T1_cc();
842 gen_op_movl_T0_Dshift
[ot
]();
844 gen_op_addl_EDI_T0();
846 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: T0 = [seg:ESI], T1 = [ES:EDI], compare,
   then advance both index registers by the Dshift delta.
   NOTE(review): braces and the aflag branch line (858/861) are missing
   from this extraction. */
850 static inline void gen_cmps(DisasContext
*s
, int ot
)
852 gen_string_movl_A0_ESI(s
);
853 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
854 gen_string_movl_A0_EDI(s
);
855 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
856 gen_op_cmpl_T0_T1_cc();
857 gen_op_movl_T0_Dshift
[ot
]();
859 gen_op_addl_ESI_T0();
860 gen_op_addl_EDI_T0();
862 gen_op_addw_ESI_T0();
863 gen_op_addw_EDI_T0();
/* Emit one INS iteration: port read (port in DX) into T0, store to
   [ES:EDI], advance EDI by the Dshift delta.
   NOTE(review): braces and the aflag branch line (873/875) are missing
   from this extraction. */
867 static inline void gen_ins(DisasContext
*s
, int ot
)
869 gen_op_in_DX_T0
[ot
]();
870 gen_string_movl_A0_EDI(s
);
871 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
872 gen_op_movl_T0_Dshift
[ot
]();
874 gen_op_addl_EDI_T0();
876 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: load [seg:ESI] into T0, write it to the
   port in DX, advance ESI by the Dshift delta.
   NOTE(review): braces and the aflag branch line (886/888) are missing
   from this extraction. */
880 static inline void gen_outs(DisasContext
*s
, int ot
)
882 gen_string_movl_A0_ESI(s
);
883 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
884 gen_op_out_DX_T0
[ot
]();
885 gen_op_movl_T0_Dshift
[ot
]();
887 gen_op_addl_ESI_T0();
889 gen_op_addw_ESI_T0();
/* Generators for the rep/repz/repnz string-op wrappers: loop by jumping
   back to the current insn (cur_eip) until ECX hits zero, re-checking
   ZF for the GEN_REPZ2 (scas/cmps) variants via string_jnz_sub.
   NOTE(review): extraction dropped interior macro lines (898, 901, 905,
   908-909, 914-915, 918, 921, 923, 925, 928+), including braces and
   conditionals; comments cannot be inserted inside the backslash-
   continued bodies, so the macro text is kept exactly as extracted. */
893 /* same method as Valgrind : we generate jumps to current or next
895 #define GEN_REPZ(op) \
896 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
897 unsigned int cur_eip, unsigned int next_eip) \
899 gen_update_cc_op(s); \
900 gen_jz_ecx_string(s, next_eip); \
902 gen_op_dec_ECX[s->aflag](); \
903 /* a loop would cause two single step exceptions if ECX = 1 \
904 before rep string_insn */ \
906 gen_op_jz_ecx_im[s->aflag](next_eip); \
907 gen_jmp(s, cur_eip); \
910 #define GEN_REPZ2(op) \
911 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
912 unsigned int cur_eip, \
913 unsigned int next_eip, \
916 gen_update_cc_op(s); \
917 gen_jz_ecx_string(s, next_eip); \
919 gen_op_dec_ECX[s->aflag](); \
920 gen_op_set_cc_op(CC_OP_SUBB + ot); \
922 gen_op_string_jnz_sub_im[nz][ot](next_eip); \
924 gen_op_string_jnz_sub[nz][ot]((long)s->tb); \
926 gen_op_jz_ecx_im[s->aflag](next_eip); \
927 gen_jmp(s, cur_eip); \
949 static GenOpFunc3
*gen_jcc_sub
[3][8] = {
981 static GenOpFunc2
*gen_op_loop
[2][4] = {
996 static GenOpFunc
*gen_setcc_slow
[8] = {
1007 static GenOpFunc
*gen_setcc_sub
[3][8] = {
1010 gen_op_setb_T0_subb
,
1011 gen_op_setz_T0_subb
,
1012 gen_op_setbe_T0_subb
,
1013 gen_op_sets_T0_subb
,
1015 gen_op_setl_T0_subb
,
1016 gen_op_setle_T0_subb
,
1020 gen_op_setb_T0_subw
,
1021 gen_op_setz_T0_subw
,
1022 gen_op_setbe_T0_subw
,
1023 gen_op_sets_T0_subw
,
1025 gen_op_setl_T0_subw
,
1026 gen_op_setle_T0_subw
,
1030 gen_op_setb_T0_subl
,
1031 gen_op_setz_T0_subl
,
1032 gen_op_setbe_T0_subl
,
1033 gen_op_sets_T0_subl
,
1035 gen_op_setl_T0_subl
,
1036 gen_op_setle_T0_subl
,
1040 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1041 gen_op_fadd_ST0_FT0
,
1042 gen_op_fmul_ST0_FT0
,
1043 gen_op_fcom_ST0_FT0
,
1044 gen_op_fcom_ST0_FT0
,
1045 gen_op_fsub_ST0_FT0
,
1046 gen_op_fsubr_ST0_FT0
,
1047 gen_op_fdiv_ST0_FT0
,
1048 gen_op_fdivr_ST0_FT0
,
1051 /* NOTE the exception in "r" op ordering */
1052 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1053 gen_op_fadd_STN_ST0
,
1054 gen_op_fmul_STN_ST0
,
1057 gen_op_fsubr_STN_ST0
,
1058 gen_op_fsub_STN_ST0
,
1059 gen_op_fdivr_STN_ST0
,
1060 gen_op_fdiv_STN_ST0
,
/* Emit the micro-ops for one arith/logic instruction (op in the OP_*
   enum) of width ot, with destination register d or memory (A0) when
   d == OR_TMP0. Tracks the resulting condition-code state lazily in
   s1->cc_op; ADC/SBB need the input flags, so cc_op is materialized
   first and becomes dynamic afterwards.
   NOTE(review): this span lost its switch/case lines, braces and
   several else branches (numbering jumps 1064->1066, 1071->1076,
   1080->1082, 1094->1100, 1107->1110, 1114->1116->end); the visible
   statements are the case bodies only — not a complete function. */
1063 /* if d == OR_TMP0, it means memory operand (address in A0) */
1064 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1066 GenOpFunc
*gen_update_cc
;
1069 gen_op_mov_TN_reg
[ot
][0][d
]();
1071 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1076 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1077 gen_op_set_cc_op(s1
->cc_op
);
1079 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1080 gen_op_mov_reg_T0
[ot
][d
]();
1082 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1084 s1
->cc_op
= CC_OP_DYNAMIC
;
1087 gen_op_addl_T0_T1();
1088 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1089 gen_update_cc
= gen_op_update2_cc
;
1092 gen_op_subl_T0_T1();
1093 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1094 gen_update_cc
= gen_op_update2_cc
;
1100 gen_op_arith_T0_T1_cc
[op
]();
1101 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1102 gen_update_cc
= gen_op_update1_cc
;
1105 gen_op_cmpl_T0_T1_cc();
1106 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1107 gen_update_cc
= NULL
;
1110 if (op
!= OP_CMPL
) {
1112 gen_op_mov_reg_T0
[ot
][d
]();
1114 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1116 /* the flags update must happen after the memory write (precise
1117 exception support) */
/* Emit INC (c > 0) or DEC (c < 0) of width ot on register d, or on
   memory via A0 when d == OR_TMP0. Input flags are materialized first
   because INC/DEC preserve CF; cc_op becomes CC_OP_INCB/DECB + ot.
   NOTE(review): braces, the register/memory selection lines and the
   inc/dec micro-op emission lines (1126, 1128, 1132-1133, 1135-1136,
   1138-1139, 1141) are missing from this extraction. */
1123 /* if d == OR_TMP0, it means memory operand (address in A0) */
1124 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1127 gen_op_mov_TN_reg
[ot
][0][d
]();
1129 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1130 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1131 gen_op_set_cc_op(s1
->cc_op
);
1134 s1
->cc_op
= CC_OP_INCB
+ ot
;
1137 s1
->cc_op
= CC_OP_DECB
+ ot
;
1140 gen_op_mov_reg_T0
[ot
][d
]();
1142 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1143 gen_op_update_inc_cc();
/* Emit a shift/rotate (op indexes the shift tables) of width ot:
   destination d (register, or memory via A0 when d == OR_TMP0),
   count taken from register s. A zero count leaves flags unchanged,
   so the shift micro-ops must see the prior flags — hence the cc_op
   materialization — and cc_op is dynamic afterwards.
   NOTE(review): braces and the register/memory if/else lines (1148,
   1150, 1152, 1157-1158, 1160, 1162) are missing from this
   extraction. */
1146 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1149 gen_op_mov_TN_reg
[ot
][0][d
]();
1151 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1153 gen_op_mov_TN_reg
[ot
][1][s
]();
1154 /* for zero counts, flags are not updated, so must do it dynamically */
1155 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1156 gen_op_set_cc_op(s1
->cc_op
);
1159 gen_op_shift_T0_T1_cc
[ot
][op
]();
1161 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
1163 gen_op_mov_reg_T0
[ot
][d
]();
1164 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
/* Shift by an immediate count c: load c into T1 and reuse gen_shift
   with the OR_TMP1 pseudo-register as the count source. */
1167 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1169 /* currently not optimized */
1170 gen_op_movl_T1_im(c
);
1171 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
/* Decode a ModRM (+ optional SIB + displacement) memory operand and
   emit micro-ops leaving the effective address in A0, adding the
   segment base when required (override, or SS default for EBP/ESP
   bases). Covers both 32-bit (SIB) and 16-bit addressing forms; the
   16-bit rm cases 0..7 are the classic BX+SI/BX+DI/BP+SI/... table.
   NOTE(review): this extraction dropped the bulk of the control flow —
   the aflag split, mod/rm switch headers, havesib handling, the base
   decoding, and all closing braces (numbering jumps e.g. 1187->1199,
   1201->1209, 1209->1216, 1228->1229, 1237->1241, 1246->1252,
   1266->1272, and every 16-bit case label). Declarations for scale,
   index, base, havesib and disp are also on missing lines. Treat the
   visible statements as fragments, not a complete function. */
1174 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1181 int mod
, rm
, code
, override
, must_add_seg
;
1183 override
= s
->override
;
1184 must_add_seg
= s
->addseg
;
1187 mod
= (modrm
>> 6) & 3;
1199 code
= ldub_code(s
->pc
++);
1200 scale
= (code
>> 6) & 3;
1201 index
= (code
>> 3) & 7;
1209 disp
= ldl_code(s
->pc
);
1216 disp
= (int8_t)ldub_code(s
->pc
++);
1220 disp
= ldl_code(s
->pc
);
1226 /* for correct popl handling with esp */
1227 if (base
== 4 && s
->popl_esp_hack
)
1228 disp
+= s
->popl_esp_hack
;
1229 gen_op_movl_A0_reg
[base
]();
1231 gen_op_addl_A0_im(disp
);
1233 gen_op_movl_A0_im(disp
);
1235 /* XXX: index == 4 is always invalid */
1236 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1237 gen_op_addl_A0_reg_sN
[scale
][index
]();
1241 if (base
== R_EBP
|| base
== R_ESP
)
1246 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1252 disp
= lduw_code(s
->pc
);
1254 gen_op_movl_A0_im(disp
);
1255 rm
= 0; /* avoid SS override */
1262 disp
= (int8_t)ldub_code(s
->pc
++);
1266 disp
= lduw_code(s
->pc
);
1272 gen_op_movl_A0_reg
[R_EBX
]();
1273 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1276 gen_op_movl_A0_reg
[R_EBX
]();
1277 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1280 gen_op_movl_A0_reg
[R_EBP
]();
1281 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1284 gen_op_movl_A0_reg
[R_EBP
]();
1285 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1288 gen_op_movl_A0_reg
[R_ESI
]();
1291 gen_op_movl_A0_reg
[R_EDI
]();
1294 gen_op_movl_A0_reg
[R_EBP
]();
1298 gen_op_movl_A0_reg
[R_EBX
]();
1302 gen_op_addl_A0_im(disp
);
1303 gen_op_andl_A0_ffff();
1307 if (rm
== 2 || rm
== 3 || rm
== 6)
1312 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
/* Generic ModRM load/store: for a register operand (mod == 3) move
   between reg and rm directly; for a memory operand compute the
   address via gen_lea_modrm and load into / store T0 from memory.
   reg == OR_TMP0 means "leave the value in T0" rather than a register.
   NOTE(review): braces and the if/else lines selecting register vs
   memory and load vs store (1325, 1327, 1329-1332, 1335, 1337,
   1339-1340, 1342-1343, 1346, 1348, 1350+) are missing from this
   extraction. */
1322 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1324 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1326 int mod
, rm
, opreg
, disp
;
1328 mod
= (modrm
>> 6) & 3;
1333 gen_op_mov_TN_reg
[ot
][0][reg
]();
1334 gen_op_mov_reg_T0
[ot
][rm
]();
1336 gen_op_mov_TN_reg
[ot
][0][rm
]();
1338 gen_op_mov_reg_T0
[ot
][reg
]();
1341 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1344 gen_op_mov_TN_reg
[ot
][0][reg
]();
1345 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1347 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1349 gen_op_mov_reg_T0
[ot
][reg
]();
/* Fetch an immediate of size ot (byte/word/long) from the instruction
   stream at s->pc and return it.
   NOTE(review): the switch/case lines, the pc advance for the word and
   long cases, the ret declaration and the return statement are on
   lines missing from this extraction (1355-1359, 1361-1363,
   1365-1368, 1370+). */
1354 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1360 ret
= ldub_code(s
->pc
);
1364 ret
= lduw_code(s
->pc
);
1369 ret
= ldl_code(s
->pc
);
/* Emit a conditional jump for Jcc condition code b: taken target val,
   fall-through next_eip. When cc_op records a recent sub/cmp (or a
   compatible add/inc/dec state) the comparison is folded directly into
   a table-driven jcc micro-op (gen_jcc_sub, chained to the TB);
   otherwise flags are computed via the slow setcc path and a dynamic
   jcc_im is emitted. b's low bit inverts the condition (swapped
   taken/not-taken arguments).
   NOTE(review): most control flow is missing from this extraction —
   the jmp_opt split, the switch on cc_op, the "easy jumps" cases
   (original 1394-1420), the func==NULL slow-path branch, the inv test
   selecting argument order, and all braces. Fragments only. */
1376 static inline void gen_jcc(DisasContext
*s
, int b
, int val
, int next_eip
)
1378 TranslationBlock
*tb
;
1383 jcc_op
= (b
>> 1) & 7;
1387 /* we optimize the cmp/jcc case */
1391 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1394 /* some jumps are easy to compute */
1421 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1424 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1436 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1437 gen_op_set_cc_op(s
->cc_op
);
1440 gen_setcc_slow
[jcc_op
]();
1446 func((long)tb
, val
, next_eip
);
1448 func((long)tb
, next_eip
, val
);
1452 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1453 gen_op_set_cc_op(s
->cc_op
);
1454 s
->cc_op
= CC_OP_DYNAMIC
;
1456 gen_setcc_slow
[jcc_op
]();
1458 gen_op_jcc_im(val
, next_eip
);
1460 gen_op_jcc_im(next_eip
, val
);
/* Emit SETcc for condition b into T0: reuse the sub/cmp-optimized
   setcc tables when cc_op permits, otherwise materialize flags and use
   the slow per-condition setter. Mirrors the structure of gen_jcc.
   NOTE(review): the switch on cc_op, the "easy" cases (original
   1483-1500), the inversion handling, the final func() call and all
   braces are on lines missing from this extraction. */
1466 static void gen_setcc(DisasContext
*s
, int b
)
1472 jcc_op
= (b
>> 1) & 7;
1474 /* we optimize the cmp/jcc case */
1478 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1483 /* some jumps are easy to compute */
1501 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1504 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1512 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1513 gen_op_set_cc_op(s
->cc_op
);
1514 func
= gen_setcc_slow
[jcc_op
];
/* Load segment register seg_reg from the selector in T0. In protected
   (non-vm86) mode this goes through the full checking micro-op (which
   can fault, hence flags + EIP are synced first); in real/vm86 mode
   the selector is written directly. Translation is aborted afterwards
   for R_SS or for DS/ES/SS when addseg is off, per the comment below.
   NOTE(review): braces, the else line (1533) and the body of the final
   if (presumably setting s->is_jmp, original 1541+) are missing from
   this extraction. */
1523 /* move T0 to seg_reg and compute if the CPU state may change. Never
1524 call this function with seg_reg == R_CS */
1525 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, unsigned int cur_eip
)
1527 if (s
->pe
&& !s
->vm86
) {
1528 /* XXX: optimize by finding processor state dynamically */
1529 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1530 gen_op_set_cc_op(s
->cc_op
);
1531 gen_op_jmp_im(cur_eip
);
1532 gen_op_movl_seg_T0(seg_reg
);
1534 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1536 /* abort translation because the register may have a non zero base
1537 or because ss32 may change. For R_SS, translation must always
1538 stop as a special handling must be done to disable hardware
1539 interrupts for the next instruction */
1540 if (seg_reg
== R_SS
|| (!s
->addseg
&& seg_reg
< R_FS
))
/* Adjust ESP by addend, using specialized micro-ops for the common
   +2/+4 cases; addl forms for 32-bit SS, addw forms for 16-bit SS.
   NOTE(review): the ss32 branch, the `if (addend == 2)` lines and the
   else/brace lines (1545-1547, 1551, 1553-1554, 1558, 1560+) are
   missing from this extraction. */
1544 static inline void gen_stack_update(DisasContext
*s
, int addend
)
1548 gen_op_addl_ESP_2();
1549 else if (addend
== 4)
1550 gen_op_addl_ESP_4();
1552 gen_op_addl_ESP_im(addend
);
1555 gen_op_addw_ESP_2();
1556 else if (addend
== 4)
1557 gen_op_addw_ESP_4();
1559 gen_op_addw_ESP_im(addend
);
/* Push T0: compute the new top-of-stack address in A0 (pre-decrement
   by operand size), mask to 16 bits for 16-bit SS, add the SS base,
   store T0 there, then write back ESP — directly from A0 in the flat
   ss32 case, otherwise from T1 via the reg_T1 table.
   NOTE(review): the pre-decrement micro-ops and the ss32/addseg branch
   structure (original 1565, 1567-1572, 1575-1576, 1580, 1584) are on
   lines missing from this extraction. */
1563 /* generate a push. It depends on ss32, addseg and dflag */
1564 static void gen_push_T0(DisasContext
*s
)
1566 gen_op_movl_A0_reg
[R_ESP
]();
1573 gen_op_movl_T1_A0();
1574 gen_op_addl_A0_SS();
1577 gen_op_andl_A0_ffff();
1578 gen_op_movl_T1_A0();
1579 gen_op_addl_A0_SS();
1581 gen_op_st_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
1582 if (s
->ss32
&& !s
->addseg
)
1583 gen_op_movl_ESP_A0();
1585 gen_op_mov_reg_T1
[s
->ss32
+ 1][R_ESP
]();
/* Push T1 (slow variant used by call Ev, since T0 holds the call
   target): same addressing as gen_push_T0 but ESP is updated via
   gen_stack_update when A0 cannot be written back directly.
   NOTE(review): pre-decrement and branch lines (original 1591,
   1593-1598, 1600-1601, 1604, 1606, 1609) are missing from this
   extraction. */
1588 /* generate a push. It depends on ss32, addseg and dflag */
1589 /* slower version for T1, only used for call Ev */
1590 static void gen_push_T1(DisasContext
*s
)
1592 gen_op_movl_A0_reg
[R_ESP
]();
1599 gen_op_addl_A0_SS();
1602 gen_op_andl_A0_ffff();
1603 gen_op_addl_A0_SS();
1605 gen_op_st_T1_A0
[s
->dflag
+ 1 + s
->mem_index
]();
1607 if (s
->ss32
&& !s
->addseg
)
1608 gen_op_movl_ESP_A0();
1610 gen_stack_update(s
, (-2) << s
->dflag
);
/* First half of a pop: load the stack top into T0 without yet moving
   ESP (gen_pop_update does that), so a faulting load leaves ESP
   intact — required for precise exceptions.
   NOTE(review): the ss32 branch lines and braces (original 1615,
   1617-1618, 1620, 1623) are missing from this extraction. */
1613 /* two step pop is necessary for precise exceptions */
1614 static void gen_pop_T0(DisasContext
*s
)
1616 gen_op_movl_A0_reg
[R_ESP
]();
1619 gen_op_addl_A0_SS();
1621 gen_op_andl_A0_ffff();
1622 gen_op_addl_A0_SS();
1624 gen_op_ld_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
/* Second half of a pop: advance ESP by the operand size
   (2 for 16-bit dflag, 4 for 32-bit). */
1627 static void gen_pop_update(DisasContext
*s
)
1629 gen_stack_update(s
, 2 << s
->dflag
);
/* Put the current stack-top linear address in A0 (and the offset in
   T1): ESP, 16-bit masked when SS is 16-bit, plus the SS base.
   NOTE(review): the ss32 branch and brace lines (original 1633, 1635,
   1638, 1640) are missing from this extraction. */
1632 static void gen_stack_A0(DisasContext
*s
)
1634 gen_op_movl_A0_ESP();
1636 gen_op_andl_A0_ffff();
1637 gen_op_movl_T1_A0();
1639 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
/* PUSHA: pre-compute the final stack pointer (ESP - 16<<dflag), then
   store the eight GPRs from EDI (7-i starts at index 7) downward ...
   upward in memory, advancing A0 by the operand size each iteration,
   and finally write the saved pointer (T1) back to ESP.
   NOTE(review): the `int i` declaration, ss32/addseg branch lines and
   braces (original 1644-1645, 1648, 1651, 1657) are missing from this
   extraction. */
1642 /* NOTE: wrap around in 16 bit not fully handled */
1643 static void gen_pusha(DisasContext
*s
)
1646 gen_op_movl_A0_ESP();
1647 gen_op_addl_A0_im(-16 << s
->dflag
);
1649 gen_op_andl_A0_ffff();
1650 gen_op_movl_T1_A0();
1652 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1653 for(i
= 0;i
< 8; i
++) {
1654 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
1655 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
1656 gen_op_addl_A0_im(2 << s
->dflag
);
1658 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
/* POPA: reload the eight GPRs from the stack in reverse push order
   (register 7-i, skipping the stored ESP slot per the comment), then
   set ESP to the pre-computed end address held in T1 (old ESP +
   16<<dflag).
   NOTE(review): the `int i` declaration, ss32/addseg branches, the
   ESP-skip conditional and braces (original 1663-1664, 1666, 1670,
   1674, 1677, 1679) are missing from this extraction. */
1661 /* NOTE: wrap around in 16 bit not fully handled */
1662 static void gen_popa(DisasContext
*s
)
1665 gen_op_movl_A0_ESP();
1667 gen_op_andl_A0_ffff();
1668 gen_op_movl_T1_A0();
1669 gen_op_addl_T1_im(16 << s
->dflag
);
1671 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1672 for(i
= 0;i
< 8; i
++) {
1673 /* ESP is not reloaded */
1675 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
1676 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
1678 gen_op_addl_A0_im(2 << s
->dflag
);
1680 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
/* ENTER: push EBP, optionally copy `level` saved frame pointers
   (the nesting-level loop), push the new frame pointer, set EBP, then
   reserve esp_addend bytes by lowering ESP via T1.
   NOTE(review): large parts of the control flow are missing from this
   extraction — the ss32/addseg branches, the level1 computation and
   the nesting-level copy loop (original numbering jumps 1689->1692,
   1703->1706, 1708->1710, 1714->1716). The visible statements around
   lines 1706-1711 are loop-body / tail fragments only. */
1683 /* NOTE: wrap around in 16 bit not fully handled */
1684 /* XXX: check this */
1685 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
1687 int ot
, level1
, addend
, opsize
;
1689 ot
= s
->dflag
+ OT_WORD
;
1692 opsize
= 2 << s
->dflag
;
1694 gen_op_movl_A0_ESP();
1695 gen_op_addl_A0_im(-opsize
);
1697 gen_op_andl_A0_ffff();
1698 gen_op_movl_T1_A0();
1700 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1702 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
1703 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1706 gen_op_addl_A0_im(-opsize
);
1707 gen_op_addl_T0_im(-opsize
);
1708 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1710 gen_op_addl_A0_im(-opsize
);
1711 gen_op_st_T1_A0
[ot
+ s
->mem_index
]();
1713 gen_op_mov_reg_T1
[ot
][R_EBP
]();
1714 addend
= -esp_addend
;
1716 addend
-= opsize
* (level1
+ 1);
1717 gen_op_addl_T1_im(addend
);
1718 gen_op_mov_reg_T1
[ot
][R_ESP
]();
/* Raise CPU exception trapno at guest address cur_eip: sync lazy
   flags, store EIP, then emit the raise_exception micro-op (which
   exits the translated code). */
1721 static void gen_exception(DisasContext
*s
, int trapno
, unsigned int cur_eip
)
1723 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1724 gen_op_set_cc_op(s
->cc_op
);
1725 gen_op_jmp_im(cur_eip
);
1726 gen_op_raise_exception(trapno
);
/* Raise software interrupt intno (INT n): like gen_exception but the
   raise_interrupt micro-op also receives next_eip, the return address
   to push, and performs the privilege checks mentioned below. */
1730 /* an interrupt is different from an exception because of the
1731 priviledge checks */
1732 static void gen_interrupt(DisasContext
*s
, int intno
,
1733 unsigned int cur_eip
, unsigned int next_eip
)
1735 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1736 gen_op_set_cc_op(s
->cc_op
);
1737 gen_op_jmp_im(cur_eip
);
1738 gen_op_raise_interrupt(intno
, next_eip
);
/* Stop at a debug point: sync flags and EIP at cur_eip.
   NOTE(review): the tail of the function (original 1747+, presumably
   raising the debug exception) is missing from this extraction. */
1742 static void gen_debug(DisasContext
*s
, unsigned int cur_eip
)
1744 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1745 gen_op_set_cc_op(s
->cc_op
);
1746 gen_op_jmp_im(cur_eip
);
/* Generic end-of-block epilogue: sync flags, clear the IRQ-inhibit
   flag if this TB started with interrupts inhibited (e.g. after mov
   ss), and raise the single-step trap when singlestep is enabled.
   NOTE(review): closing braces, the TF handling and the normal exit
   path (original 1759, 1761-1762, 1764+) are missing from this
   extraction. */
1751 /* generate a generic end of block. Trace exception is also generated
1753 static void gen_eob(DisasContext
*s
)
1755 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1756 gen_op_set_cc_op(s
->cc_op
);
1757 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
1758 gen_op_reset_inhibit_irq();
1760 if (s
->singlestep_enabled
) {
1763 gen_op_raise_exception(EXCP01_SSTP
);
/* Emit a direct jump to eip, chained to the current TB (first
   argument of gen_op_jmp) so block linking can patch it.
   NOTE(review): the jmp_opt condition and the non-chained fallback
   path (original 1776-1777, 1781+) are missing from this
   extraction. */
1771 /* generate a jump to eip. No segment change must happen before as a
1772 direct call to the next block may occur */
1773 static void gen_jmp(DisasContext
*s
, unsigned int eip
)
1775 TranslationBlock
*tb
= s
->tb
;
1778 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1779 gen_op_set_cc_op(s
->cc_op
);
1780 gen_op_jmp((long)tb
, eip
);
1788 /* convert one instruction. s->is_jmp is set if the translation must
1789 be stopped. Return the next pc value */
1790 static uint8_t *disas_insn(DisasContext
*s
, uint8_t *pc_start
)
1792 int b
, prefixes
, aflag
, dflag
;
1794 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
1795 unsigned int next_eip
;
1803 b
= ldub_code(s
->pc
);
1805 /* check prefixes */
1808 prefixes
|= PREFIX_REPZ
;
1811 prefixes
|= PREFIX_REPNZ
;
1814 prefixes
|= PREFIX_LOCK
;
1835 prefixes
|= PREFIX_DATA
;
1838 prefixes
|= PREFIX_ADR
;
1842 if (prefixes
& PREFIX_DATA
)
1844 if (prefixes
& PREFIX_ADR
)
1847 s
->prefix
= prefixes
;
1851 /* lock generation */
1852 if (prefixes
& PREFIX_LOCK
)
1855 /* now check op code */
1859 /**************************/
1860 /* extended op code */
1861 b
= ldub_code(s
->pc
++) | 0x100;
1864 /**************************/
1882 ot
= dflag
? OT_LONG
: OT_WORD
;
1885 case 0: /* OP Ev, Gv */
1886 modrm
= ldub_code(s
->pc
++);
1887 reg
= ((modrm
>> 3) & 7);
1888 mod
= (modrm
>> 6) & 3;
1891 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1893 } else if (op
== OP_XORL
&& rm
== reg
) {
1895 /* xor reg, reg optimisation */
1897 s
->cc_op
= CC_OP_LOGICB
+ ot
;
1898 gen_op_mov_reg_T0
[ot
][reg
]();
1899 gen_op_update1_cc();
1904 gen_op_mov_TN_reg
[ot
][1][reg
]();
1905 gen_op(s
, op
, ot
, opreg
);
1907 case 1: /* OP Gv, Ev */
1908 modrm
= ldub_code(s
->pc
++);
1909 mod
= (modrm
>> 6) & 3;
1910 reg
= ((modrm
>> 3) & 7);
1913 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1914 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1915 } else if (op
== OP_XORL
&& rm
== reg
) {
1918 gen_op_mov_TN_reg
[ot
][1][rm
]();
1920 gen_op(s
, op
, ot
, reg
);
1922 case 2: /* OP A, Iv */
1923 val
= insn_get(s
, ot
);
1924 gen_op_movl_T1_im(val
);
1925 gen_op(s
, op
, ot
, OR_EAX
);
1931 case 0x80: /* GRP1 */
1940 ot
= dflag
? OT_LONG
: OT_WORD
;
1942 modrm
= ldub_code(s
->pc
++);
1943 mod
= (modrm
>> 6) & 3;
1945 op
= (modrm
>> 3) & 7;
1948 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1951 opreg
= rm
+ OR_EAX
;
1958 val
= insn_get(s
, ot
);
1961 val
= (int8_t)insn_get(s
, OT_BYTE
);
1964 gen_op_movl_T1_im(val
);
1965 gen_op(s
, op
, ot
, opreg
);
1969 /**************************/
1970 /* inc, dec, and other misc arith */
1971 case 0x40 ... 0x47: /* inc Gv */
1972 ot
= dflag
? OT_LONG
: OT_WORD
;
1973 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
1975 case 0x48 ... 0x4f: /* dec Gv */
1976 ot
= dflag
? OT_LONG
: OT_WORD
;
1977 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
1979 case 0xf6: /* GRP3 */
1984 ot
= dflag
? OT_LONG
: OT_WORD
;
1986 modrm
= ldub_code(s
->pc
++);
1987 mod
= (modrm
>> 6) & 3;
1989 op
= (modrm
>> 3) & 7;
1991 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1992 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1994 gen_op_mov_TN_reg
[ot
][0][rm
]();
1999 val
= insn_get(s
, ot
);
2000 gen_op_movl_T1_im(val
);
2001 gen_op_testl_T0_T1_cc();
2002 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2007 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2009 gen_op_mov_reg_T0
[ot
][rm
]();
2015 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2017 gen_op_mov_reg_T0
[ot
][rm
]();
2019 gen_op_update_neg_cc();
2020 s
->cc_op
= CC_OP_SUBB
+ ot
;
2025 gen_op_mulb_AL_T0();
2026 s
->cc_op
= CC_OP_MULB
;
2029 gen_op_mulw_AX_T0();
2030 s
->cc_op
= CC_OP_MULW
;
2034 gen_op_mull_EAX_T0();
2035 s
->cc_op
= CC_OP_MULL
;
2042 gen_op_imulb_AL_T0();
2043 s
->cc_op
= CC_OP_MULB
;
2046 gen_op_imulw_AX_T0();
2047 s
->cc_op
= CC_OP_MULW
;
2051 gen_op_imull_EAX_T0();
2052 s
->cc_op
= CC_OP_MULL
;
2059 gen_op_divb_AL_T0(pc_start
- s
->cs_base
);
2062 gen_op_divw_AX_T0(pc_start
- s
->cs_base
);
2066 gen_op_divl_EAX_T0(pc_start
- s
->cs_base
);
2073 gen_op_idivb_AL_T0(pc_start
- s
->cs_base
);
2076 gen_op_idivw_AX_T0(pc_start
- s
->cs_base
);
2080 gen_op_idivl_EAX_T0(pc_start
- s
->cs_base
);
2089 case 0xfe: /* GRP4 */
2090 case 0xff: /* GRP5 */
2094 ot
= dflag
? OT_LONG
: OT_WORD
;
2096 modrm
= ldub_code(s
->pc
++);
2097 mod
= (modrm
>> 6) & 3;
2099 op
= (modrm
>> 3) & 7;
2100 if (op
>= 2 && b
== 0xfe) {
2104 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2105 if (op
>= 2 && op
!= 3 && op
!= 5)
2106 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2108 gen_op_mov_TN_reg
[ot
][0][rm
]();
2112 case 0: /* inc Ev */
2117 gen_inc(s
, ot
, opreg
, 1);
2119 case 1: /* dec Ev */
2124 gen_inc(s
, ot
, opreg
, -1);
2126 case 2: /* call Ev */
2127 /* XXX: optimize if memory (no 'and' is necessary) */
2129 gen_op_andl_T0_ffff();
2130 next_eip
= s
->pc
- s
->cs_base
;
2131 gen_op_movl_T1_im(next_eip
);
2136 case 3: /* lcall Ev */
2137 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2138 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2139 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2141 if (s
->pe
&& !s
->vm86
) {
2142 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2143 gen_op_set_cc_op(s
->cc_op
);
2144 gen_op_jmp_im(pc_start
- s
->cs_base
);
2145 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- s
->cs_base
);
2147 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
2151 case 4: /* jmp Ev */
2153 gen_op_andl_T0_ffff();
2157 case 5: /* ljmp Ev */
2158 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2159 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2160 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2162 if (s
->pe
&& !s
->vm86
) {
2163 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2164 gen_op_set_cc_op(s
->cc_op
);
2165 gen_op_jmp_im(pc_start
- s
->cs_base
);
2166 gen_op_ljmp_protected_T0_T1();
2168 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
2169 gen_op_movl_T0_T1();
2174 case 6: /* push Ev */
2182 case 0x84: /* test Ev, Gv */
2187 ot
= dflag
? OT_LONG
: OT_WORD
;
2189 modrm
= ldub_code(s
->pc
++);
2190 mod
= (modrm
>> 6) & 3;
2192 reg
= (modrm
>> 3) & 7;
2194 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2195 gen_op_mov_TN_reg
[ot
][1][reg
+ OR_EAX
]();
2196 gen_op_testl_T0_T1_cc();
2197 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2200 case 0xa8: /* test eAX, Iv */
2205 ot
= dflag
? OT_LONG
: OT_WORD
;
2206 val
= insn_get(s
, ot
);
2208 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
2209 gen_op_movl_T1_im(val
);
2210 gen_op_testl_T0_T1_cc();
2211 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2214 case 0x98: /* CWDE/CBW */
2216 gen_op_movswl_EAX_AX();
2218 gen_op_movsbw_AX_AL();
2220 case 0x99: /* CDQ/CWD */
2222 gen_op_movslq_EDX_EAX();
2224 gen_op_movswl_DX_AX();
2226 case 0x1af: /* imul Gv, Ev */
2227 case 0x69: /* imul Gv, Ev, I */
2229 ot
= dflag
? OT_LONG
: OT_WORD
;
2230 modrm
= ldub_code(s
->pc
++);
2231 reg
= ((modrm
>> 3) & 7) + OR_EAX
;
2232 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2234 val
= insn_get(s
, ot
);
2235 gen_op_movl_T1_im(val
);
2236 } else if (b
== 0x6b) {
2237 val
= insn_get(s
, OT_BYTE
);
2238 gen_op_movl_T1_im(val
);
2240 gen_op_mov_TN_reg
[ot
][1][reg
]();
2243 if (ot
== OT_LONG
) {
2244 gen_op_imull_T0_T1();
2246 gen_op_imulw_T0_T1();
2248 gen_op_mov_reg_T0
[ot
][reg
]();
2249 s
->cc_op
= CC_OP_MULB
+ ot
;
2252 case 0x1c1: /* xadd Ev, Gv */
2256 ot
= dflag
? OT_LONG
: OT_WORD
;
2257 modrm
= ldub_code(s
->pc
++);
2258 reg
= (modrm
>> 3) & 7;
2259 mod
= (modrm
>> 6) & 3;
2262 gen_op_mov_TN_reg
[ot
][0][reg
]();
2263 gen_op_mov_TN_reg
[ot
][1][rm
]();
2264 gen_op_addl_T0_T1();
2265 gen_op_mov_reg_T1
[ot
][reg
]();
2266 gen_op_mov_reg_T0
[ot
][rm
]();
2268 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2269 gen_op_mov_TN_reg
[ot
][0][reg
]();
2270 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2271 gen_op_addl_T0_T1();
2272 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2273 gen_op_mov_reg_T1
[ot
][reg
]();
2275 gen_op_update2_cc();
2276 s
->cc_op
= CC_OP_ADDB
+ ot
;
2279 case 0x1b1: /* cmpxchg Ev, Gv */
2283 ot
= dflag
? OT_LONG
: OT_WORD
;
2284 modrm
= ldub_code(s
->pc
++);
2285 reg
= (modrm
>> 3) & 7;
2286 mod
= (modrm
>> 6) & 3;
2287 gen_op_mov_TN_reg
[ot
][1][reg
]();
2290 gen_op_mov_TN_reg
[ot
][0][rm
]();
2291 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
2292 gen_op_mov_reg_T0
[ot
][rm
]();
2294 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2295 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2296 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
2298 s
->cc_op
= CC_OP_SUBB
+ ot
;
2300 case 0x1c7: /* cmpxchg8b */
2301 modrm
= ldub_code(s
->pc
++);
2302 mod
= (modrm
>> 6) & 3;
2305 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2306 gen_op_set_cc_op(s
->cc_op
);
2307 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2309 s
->cc_op
= CC_OP_EFLAGS
;
2312 /**************************/
2314 case 0x50 ... 0x57: /* push */
2315 gen_op_mov_TN_reg
[OT_LONG
][0][b
& 7]();
2318 case 0x58 ... 0x5f: /* pop */
2319 ot
= dflag
? OT_LONG
: OT_WORD
;
2321 /* NOTE: order is important for pop %sp */
2323 gen_op_mov_reg_T0
[ot
][b
& 7]();
2325 case 0x60: /* pusha */
2328 case 0x61: /* popa */
2331 case 0x68: /* push Iv */
2333 ot
= dflag
? OT_LONG
: OT_WORD
;
2335 val
= insn_get(s
, ot
);
2337 val
= (int8_t)insn_get(s
, OT_BYTE
);
2338 gen_op_movl_T0_im(val
);
2341 case 0x8f: /* pop Ev */
2342 ot
= dflag
? OT_LONG
: OT_WORD
;
2343 modrm
= ldub_code(s
->pc
++);
2344 mod
= (modrm
>> 6) & 3;
2347 /* NOTE: order is important for pop %sp */
2350 gen_op_mov_reg_T0
[ot
][rm
]();
2352 /* NOTE: order is important too for MMU exceptions */
2353 s
->popl_esp_hack
= 2 << dflag
;
2354 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2355 s
->popl_esp_hack
= 0;
2359 case 0xc8: /* enter */
2362 val
= lduw_code(s
->pc
);
2364 level
= ldub_code(s
->pc
++);
2365 gen_enter(s
, val
, level
);
2368 case 0xc9: /* leave */
2369 /* XXX: exception not precise (ESP is updated before potential exception) */
2371 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2372 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
2374 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
2375 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
2378 ot
= dflag
? OT_LONG
: OT_WORD
;
2379 gen_op_mov_reg_T0
[ot
][R_EBP
]();
2382 case 0x06: /* push es */
2383 case 0x0e: /* push cs */
2384 case 0x16: /* push ss */
2385 case 0x1e: /* push ds */
2386 gen_op_movl_T0_seg(b
>> 3);
2389 case 0x1a0: /* push fs */
2390 case 0x1a8: /* push gs */
2391 gen_op_movl_T0_seg((b
>> 3) & 7);
2394 case 0x07: /* pop es */
2395 case 0x17: /* pop ss */
2396 case 0x1f: /* pop ds */
2399 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
2402 /* if reg == SS, inhibit interrupts/trace. */
2403 /* If several instructions disable interrupts, only the
2405 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
2406 gen_op_set_inhibit_irq();
2410 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2414 case 0x1a1: /* pop fs */
2415 case 0x1a9: /* pop gs */
2417 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
2420 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2425 /**************************/
2428 case 0x89: /* mov Gv, Ev */
2432 ot
= dflag
? OT_LONG
: OT_WORD
;
2433 modrm
= ldub_code(s
->pc
++);
2434 reg
= (modrm
>> 3) & 7;
2436 /* generate a generic store */
2437 gen_ldst_modrm(s
, modrm
, ot
, OR_EAX
+ reg
, 1);
2440 case 0xc7: /* mov Ev, Iv */
2444 ot
= dflag
? OT_LONG
: OT_WORD
;
2445 modrm
= ldub_code(s
->pc
++);
2446 mod
= (modrm
>> 6) & 3;
2448 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2449 val
= insn_get(s
, ot
);
2450 gen_op_movl_T0_im(val
);
2452 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2454 gen_op_mov_reg_T0
[ot
][modrm
& 7]();
2457 case 0x8b: /* mov Ev, Gv */
2461 ot
= dflag
? OT_LONG
: OT_WORD
;
2462 modrm
= ldub_code(s
->pc
++);
2463 reg
= (modrm
>> 3) & 7;
2465 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2466 gen_op_mov_reg_T0
[ot
][reg
]();
2468 case 0x8e: /* mov seg, Gv */
2469 modrm
= ldub_code(s
->pc
++);
2470 reg
= (modrm
>> 3) & 7;
2471 if (reg
>= 6 || reg
== R_CS
)
2473 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2474 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
2476 /* if reg == SS, inhibit interrupts/trace */
2477 /* If several instructions disable interrupts, only the
2479 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
2480 gen_op_set_inhibit_irq();
2484 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2488 case 0x8c: /* mov Gv, seg */
2489 modrm
= ldub_code(s
->pc
++);
2490 reg
= (modrm
>> 3) & 7;
2491 mod
= (modrm
>> 6) & 3;
2494 gen_op_movl_T0_seg(reg
);
2496 if (mod
== 3 && dflag
)
2498 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2501 case 0x1b6: /* movzbS Gv, Eb */
2502 case 0x1b7: /* movzwS Gv, Eb */
2503 case 0x1be: /* movsbS Gv, Eb */
2504 case 0x1bf: /* movswS Gv, Eb */
2507 /* d_ot is the size of destination */
2508 d_ot
= dflag
+ OT_WORD
;
2509 /* ot is the size of source */
2510 ot
= (b
& 1) + OT_BYTE
;
2511 modrm
= ldub_code(s
->pc
++);
2512 reg
= ((modrm
>> 3) & 7) + OR_EAX
;
2513 mod
= (modrm
>> 6) & 3;
2517 gen_op_mov_TN_reg
[ot
][0][rm
]();
2518 switch(ot
| (b
& 8)) {
2520 gen_op_movzbl_T0_T0();
2523 gen_op_movsbl_T0_T0();
2526 gen_op_movzwl_T0_T0();
2530 gen_op_movswl_T0_T0();
2533 gen_op_mov_reg_T0
[d_ot
][reg
]();
2535 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2537 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
2539 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
2541 gen_op_mov_reg_T0
[d_ot
][reg
]();
2546 case 0x8d: /* lea */
2547 ot
= dflag
? OT_LONG
: OT_WORD
;
2548 modrm
= ldub_code(s
->pc
++);
2549 reg
= (modrm
>> 3) & 7;
2550 /* we must ensure that no segment is added */
2554 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2556 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
2559 case 0xa0: /* mov EAX, Ov */
2561 case 0xa2: /* mov Ov, EAX */
2566 ot
= dflag
? OT_LONG
: OT_WORD
;
2568 offset_addr
= insn_get(s
, OT_LONG
);
2570 offset_addr
= insn_get(s
, OT_WORD
);
2571 gen_op_movl_A0_im(offset_addr
);
2572 /* handle override */
2574 int override
, must_add_seg
;
2575 must_add_seg
= s
->addseg
;
2576 if (s
->override
>= 0) {
2577 override
= s
->override
;
2583 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
2587 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2588 gen_op_mov_reg_T0
[ot
][R_EAX
]();
2590 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
2591 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2594 case 0xd7: /* xlat */
2595 gen_op_movl_A0_reg
[R_EBX
]();
2596 gen_op_addl_A0_AL();
2598 gen_op_andl_A0_ffff();
2599 /* handle override */
2601 int override
, must_add_seg
;
2602 must_add_seg
= s
->addseg
;
2604 if (s
->override
>= 0) {
2605 override
= s
->override
;
2611 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
2614 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
2615 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
2617 case 0xb0 ... 0xb7: /* mov R, Ib */
2618 val
= insn_get(s
, OT_BYTE
);
2619 gen_op_movl_T0_im(val
);
2620 gen_op_mov_reg_T0
[OT_BYTE
][b
& 7]();
2622 case 0xb8 ... 0xbf: /* mov R, Iv */
2623 ot
= dflag
? OT_LONG
: OT_WORD
;
2624 val
= insn_get(s
, ot
);
2625 reg
= OR_EAX
+ (b
& 7);
2626 gen_op_movl_T0_im(val
);
2627 gen_op_mov_reg_T0
[ot
][reg
]();
2630 case 0x91 ... 0x97: /* xchg R, EAX */
2631 ot
= dflag
? OT_LONG
: OT_WORD
;
2636 case 0x87: /* xchg Ev, Gv */
2640 ot
= dflag
? OT_LONG
: OT_WORD
;
2641 modrm
= ldub_code(s
->pc
++);
2642 reg
= (modrm
>> 3) & 7;
2643 mod
= (modrm
>> 6) & 3;
2647 gen_op_mov_TN_reg
[ot
][0][reg
]();
2648 gen_op_mov_TN_reg
[ot
][1][rm
]();
2649 gen_op_mov_reg_T0
[ot
][rm
]();
2650 gen_op_mov_reg_T1
[ot
][reg
]();
2652 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2653 gen_op_mov_TN_reg
[ot
][0][reg
]();
2654 /* for xchg, lock is implicit */
2655 if (!(prefixes
& PREFIX_LOCK
))
2657 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2658 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2659 if (!(prefixes
& PREFIX_LOCK
))
2661 gen_op_mov_reg_T1
[ot
][reg
]();
2664 case 0xc4: /* les Gv */
2667 case 0xc5: /* lds Gv */
2670 case 0x1b2: /* lss Gv */
2673 case 0x1b4: /* lfs Gv */
2676 case 0x1b5: /* lgs Gv */
2679 ot
= dflag
? OT_LONG
: OT_WORD
;
2680 modrm
= ldub_code(s
->pc
++);
2681 reg
= (modrm
>> 3) & 7;
2682 mod
= (modrm
>> 6) & 3;
2685 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2686 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2687 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2688 /* load the segment first to handle exceptions properly */
2689 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2690 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
2691 /* then put the data */
2692 gen_op_mov_reg_T1
[ot
][reg
]();
2694 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2699 /************************/
2710 ot
= dflag
? OT_LONG
: OT_WORD
;
2712 modrm
= ldub_code(s
->pc
++);
2713 mod
= (modrm
>> 6) & 3;
2715 op
= (modrm
>> 3) & 7;
2718 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2721 opreg
= rm
+ OR_EAX
;
2726 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
2729 shift
= ldub_code(s
->pc
++);
2731 gen_shifti(s
, op
, ot
, opreg
, shift
);
2746 case 0x1a4: /* shld imm */
2750 case 0x1a5: /* shld cl */
2754 case 0x1ac: /* shrd imm */
2758 case 0x1ad: /* shrd cl */
2762 ot
= dflag
? OT_LONG
: OT_WORD
;
2763 modrm
= ldub_code(s
->pc
++);
2764 mod
= (modrm
>> 6) & 3;
2766 reg
= (modrm
>> 3) & 7;
2769 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2770 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2772 gen_op_mov_TN_reg
[ot
][0][rm
]();
2774 gen_op_mov_TN_reg
[ot
][1][reg
]();
2777 val
= ldub_code(s
->pc
++);
2781 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
2783 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
2784 if (op
== 0 && ot
!= OT_WORD
)
2785 s
->cc_op
= CC_OP_SHLB
+ ot
;
2787 s
->cc_op
= CC_OP_SARB
+ ot
;
2790 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2791 gen_op_set_cc_op(s
->cc_op
);
2793 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
2795 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
2796 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
2799 gen_op_mov_reg_T0
[ot
][rm
]();
2803 /************************/
2806 modrm
= ldub_code(s
->pc
++);
2807 mod
= (modrm
>> 6) & 3;
2809 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
2813 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2815 case 0x00 ... 0x07: /* fxxxs */
2816 case 0x10 ... 0x17: /* fixxxl */
2817 case 0x20 ... 0x27: /* fxxxl */
2818 case 0x30 ... 0x37: /* fixxx */
2825 gen_op_flds_FT0_A0();
2828 gen_op_fildl_FT0_A0();
2831 gen_op_fldl_FT0_A0();
2835 gen_op_fild_FT0_A0();
2839 gen_op_fp_arith_ST0_FT0
[op1
]();
2841 /* fcomp needs pop */
2846 case 0x08: /* flds */
2847 case 0x0a: /* fsts */
2848 case 0x0b: /* fstps */
2849 case 0x18: /* fildl */
2850 case 0x1a: /* fistl */
2851 case 0x1b: /* fistpl */
2852 case 0x28: /* fldl */
2853 case 0x2a: /* fstl */
2854 case 0x2b: /* fstpl */
2855 case 0x38: /* filds */
2856 case 0x3a: /* fists */
2857 case 0x3b: /* fistps */
2863 gen_op_flds_ST0_A0();
2866 gen_op_fildl_ST0_A0();
2869 gen_op_fldl_ST0_A0();
2873 gen_op_fild_ST0_A0();
2880 gen_op_fsts_ST0_A0();
2883 gen_op_fistl_ST0_A0();
2886 gen_op_fstl_ST0_A0();
2890 gen_op_fist_ST0_A0();
2898 case 0x0c: /* fldenv mem */
2899 gen_op_fldenv_A0(s
->dflag
);
2901 case 0x0d: /* fldcw mem */
2904 case 0x0e: /* fnstenv mem */
2905 gen_op_fnstenv_A0(s
->dflag
);
2907 case 0x0f: /* fnstcw mem */
2910 case 0x1d: /* fldt mem */
2911 gen_op_fldt_ST0_A0();
2913 case 0x1f: /* fstpt mem */
2914 gen_op_fstt_ST0_A0();
2917 case 0x2c: /* frstor mem */
2918 gen_op_frstor_A0(s
->dflag
);
2920 case 0x2e: /* fnsave mem */
2921 gen_op_fnsave_A0(s
->dflag
);
2923 case 0x2f: /* fnstsw mem */
2926 case 0x3c: /* fbld */
2927 gen_op_fbld_ST0_A0();
2929 case 0x3e: /* fbstp */
2930 gen_op_fbst_ST0_A0();
2933 case 0x3d: /* fildll */
2934 gen_op_fildll_ST0_A0();
2936 case 0x3f: /* fistpll */
2937 gen_op_fistll_ST0_A0();
2944 /* register float ops */
2948 case 0x08: /* fld sti */
2950 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
2952 case 0x09: /* fxchg sti */
2953 gen_op_fxchg_ST0_STN(opreg
);
2955 case 0x0a: /* grp d9/2 */
2963 case 0x0c: /* grp d9/4 */
2973 gen_op_fcom_ST0_FT0();
2982 case 0x0d: /* grp d9/5 */
2991 gen_op_fldl2t_ST0();
2995 gen_op_fldl2e_ST0();
3003 gen_op_fldlg2_ST0();
3007 gen_op_fldln2_ST0();
3018 case 0x0e: /* grp d9/6 */
3029 case 3: /* fpatan */
3032 case 4: /* fxtract */
3035 case 5: /* fprem1 */
3038 case 6: /* fdecstp */
3042 case 7: /* fincstp */
3047 case 0x0f: /* grp d9/7 */
3052 case 1: /* fyl2xp1 */
3058 case 3: /* fsincos */
3061 case 5: /* fscale */
3064 case 4: /* frndint */
3076 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
3077 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
3078 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
3084 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
3088 gen_op_fmov_FT0_STN(opreg
);
3089 gen_op_fp_arith_ST0_FT0
[op1
]();
3093 case 0x02: /* fcom */
3094 gen_op_fmov_FT0_STN(opreg
);
3095 gen_op_fcom_ST0_FT0();
3097 case 0x03: /* fcomp */
3098 gen_op_fmov_FT0_STN(opreg
);
3099 gen_op_fcom_ST0_FT0();
3102 case 0x15: /* da/5 */
3104 case 1: /* fucompp */
3105 gen_op_fmov_FT0_STN(1);
3106 gen_op_fucom_ST0_FT0();
3116 case 0: /* feni (287 only, just do nop here) */
3118 case 1: /* fdisi (287 only, just do nop here) */
3123 case 3: /* fninit */
3126 case 4: /* fsetpm (287 only, just do nop here) */
3132 case 0x1d: /* fucomi */
3133 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3134 gen_op_set_cc_op(s
->cc_op
);
3135 gen_op_fmov_FT0_STN(opreg
);
3136 gen_op_fucomi_ST0_FT0();
3137 s
->cc_op
= CC_OP_EFLAGS
;
3139 case 0x1e: /* fcomi */
3140 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3141 gen_op_set_cc_op(s
->cc_op
);
3142 gen_op_fmov_FT0_STN(opreg
);
3143 gen_op_fcomi_ST0_FT0();
3144 s
->cc_op
= CC_OP_EFLAGS
;
3146 case 0x2a: /* fst sti */
3147 gen_op_fmov_STN_ST0(opreg
);
3149 case 0x2b: /* fstp sti */
3150 gen_op_fmov_STN_ST0(opreg
);
3153 case 0x2c: /* fucom st(i) */
3154 gen_op_fmov_FT0_STN(opreg
);
3155 gen_op_fucom_ST0_FT0();
3157 case 0x2d: /* fucomp st(i) */
3158 gen_op_fmov_FT0_STN(opreg
);
3159 gen_op_fucom_ST0_FT0();
3162 case 0x33: /* de/3 */
3164 case 1: /* fcompp */
3165 gen_op_fmov_FT0_STN(1);
3166 gen_op_fcom_ST0_FT0();
3174 case 0x3c: /* df/4 */
3177 gen_op_fnstsw_EAX();
3183 case 0x3d: /* fucomip */
3184 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3185 gen_op_set_cc_op(s
->cc_op
);
3186 gen_op_fmov_FT0_STN(opreg
);
3187 gen_op_fucomi_ST0_FT0();
3189 s
->cc_op
= CC_OP_EFLAGS
;
3191 case 0x3e: /* fcomip */
3192 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3193 gen_op_set_cc_op(s
->cc_op
);
3194 gen_op_fmov_FT0_STN(opreg
);
3195 gen_op_fcomi_ST0_FT0();
3197 s
->cc_op
= CC_OP_EFLAGS
;
3199 case 0x10 ... 0x13: /* fcmovxx */
3203 const static uint8_t fcmov_cc
[8] = {
3209 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
3211 gen_op_fcmov_ST0_STN_T0(opreg
);
3219 /************************/
3222 case 0xa4: /* movsS */
3227 ot
= dflag
? OT_LONG
: OT_WORD
;
3229 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3230 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3236 case 0xaa: /* stosS */
3241 ot
= dflag
? OT_LONG
: OT_WORD
;
3243 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3244 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3249 case 0xac: /* lodsS */
3254 ot
= dflag
? OT_LONG
: OT_WORD
;
3255 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3256 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3261 case 0xae: /* scasS */
3266 ot
= dflag
? OT_LONG
: OT_WORD
;
3267 if (prefixes
& PREFIX_REPNZ
) {
3268 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
3269 } else if (prefixes
& PREFIX_REPZ
) {
3270 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
3273 s
->cc_op
= CC_OP_SUBB
+ ot
;
3277 case 0xa6: /* cmpsS */
3282 ot
= dflag
? OT_LONG
: OT_WORD
;
3283 if (prefixes
& PREFIX_REPNZ
) {
3284 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
3285 } else if (prefixes
& PREFIX_REPZ
) {
3286 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
3289 s
->cc_op
= CC_OP_SUBB
+ ot
;
3292 case 0x6c: /* insS */
3297 ot
= dflag
? OT_LONG
: OT_WORD
;
3298 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
3299 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3300 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3305 case 0x6e: /* outsS */
3310 ot
= dflag
? OT_LONG
: OT_WORD
;
3311 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
3312 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3313 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3319 /************************/
3326 ot
= dflag
? OT_LONG
: OT_WORD
;
3327 val
= ldub_code(s
->pc
++);
3328 gen_op_movl_T0_im(val
);
3329 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3331 gen_op_mov_reg_T1
[ot
][R_EAX
]();
3338 ot
= dflag
? OT_LONG
: OT_WORD
;
3339 val
= ldub_code(s
->pc
++);
3340 gen_op_movl_T0_im(val
);
3341 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3342 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
3350 ot
= dflag
? OT_LONG
: OT_WORD
;
3351 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
3352 gen_op_andl_T0_ffff();
3353 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3355 gen_op_mov_reg_T1
[ot
][R_EAX
]();
3362 ot
= dflag
? OT_LONG
: OT_WORD
;
3363 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
3364 gen_op_andl_T0_ffff();
3365 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3366 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
3370 /************************/
3372 case 0xc2: /* ret im */
3373 val
= ldsw_code(s
->pc
);
3376 gen_stack_update(s
, val
+ (2 << s
->dflag
));
3378 gen_op_andl_T0_ffff();
3382 case 0xc3: /* ret */
3386 gen_op_andl_T0_ffff();
3390 case 0xca: /* lret im */
3391 val
= ldsw_code(s
->pc
);
3394 if (s
->pe
&& !s
->vm86
) {
3395 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3396 gen_op_set_cc_op(s
->cc_op
);
3397 gen_op_jmp_im(pc_start
- s
->cs_base
);
3398 gen_op_lret_protected(s
->dflag
, val
);
3402 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
3404 gen_op_andl_T0_ffff();
3405 /* NOTE: keeping EIP updated is not a problem in case of
3409 gen_op_addl_A0_im(2 << s
->dflag
);
3410 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
3411 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3412 /* add stack offset */
3413 gen_stack_update(s
, val
+ (4 << s
->dflag
));
3417 case 0xcb: /* lret */
3420 case 0xcf: /* iret */
3423 gen_op_iret_real(s
->dflag
);
3424 s
->cc_op
= CC_OP_EFLAGS
;
3425 } else if (s
->vm86
) {
3427 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3429 gen_op_iret_real(s
->dflag
);
3430 s
->cc_op
= CC_OP_EFLAGS
;
3433 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3434 gen_op_set_cc_op(s
->cc_op
);
3435 gen_op_jmp_im(pc_start
- s
->cs_base
);
3436 gen_op_iret_protected(s
->dflag
);
3437 s
->cc_op
= CC_OP_EFLAGS
;
3441 case 0xe8: /* call im */
3443 unsigned int next_eip
;
3444 ot
= dflag
? OT_LONG
: OT_WORD
;
3445 val
= insn_get(s
, ot
);
3446 next_eip
= s
->pc
- s
->cs_base
;
3450 gen_op_movl_T0_im(next_eip
);
3455 case 0x9a: /* lcall im */
3457 unsigned int selector
, offset
;
3459 ot
= dflag
? OT_LONG
: OT_WORD
;
3460 offset
= insn_get(s
, ot
);
3461 selector
= insn_get(s
, OT_WORD
);
3463 gen_op_movl_T0_im(selector
);
3464 gen_op_movl_T1_im(offset
);
3467 case 0xe9: /* jmp */
3468 ot
= dflag
? OT_LONG
: OT_WORD
;
3469 val
= insn_get(s
, ot
);
3470 val
+= s
->pc
- s
->cs_base
;
3475 case 0xea: /* ljmp im */
3477 unsigned int selector
, offset
;
3479 ot
= dflag
? OT_LONG
: OT_WORD
;
3480 offset
= insn_get(s
, ot
);
3481 selector
= insn_get(s
, OT_WORD
);
3483 gen_op_movl_T0_im(selector
);
3484 gen_op_movl_T1_im(offset
);
3487 case 0xeb: /* jmp Jb */
3488 val
= (int8_t)insn_get(s
, OT_BYTE
);
3489 val
+= s
->pc
- s
->cs_base
;
3494 case 0x70 ... 0x7f: /* jcc Jb */
3495 val
= (int8_t)insn_get(s
, OT_BYTE
);
3497 case 0x180 ... 0x18f: /* jcc Jv */
3499 val
= insn_get(s
, OT_LONG
);
3501 val
= (int16_t)insn_get(s
, OT_WORD
);
3504 next_eip
= s
->pc
- s
->cs_base
;
3508 gen_jcc(s
, b
, val
, next_eip
);
3511 case 0x190 ... 0x19f: /* setcc Gv */
3512 modrm
= ldub_code(s
->pc
++);
3514 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
3516 case 0x140 ... 0x14f: /* cmov Gv, Ev */
3517 ot
= dflag
? OT_LONG
: OT_WORD
;
3518 modrm
= ldub_code(s
->pc
++);
3519 reg
= (modrm
>> 3) & 7;
3520 mod
= (modrm
>> 6) & 3;
3523 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3524 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3527 gen_op_mov_TN_reg
[ot
][1][rm
]();
3529 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
3532 /************************/
3534 case 0x9c: /* pushf */
3535 if (s
->vm86
&& s
->iopl
!= 3) {
3536 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3538 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3539 gen_op_set_cc_op(s
->cc_op
);
3540 gen_op_movl_T0_eflags();
3544 case 0x9d: /* popf */
3545 if (s
->vm86
&& s
->iopl
!= 3) {
3546 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3551 gen_op_movl_eflags_T0_cpl0();
3553 gen_op_movw_eflags_T0_cpl0();
3556 if (s
->cpl
<= s
->iopl
) {
3558 gen_op_movl_eflags_T0_io();
3560 gen_op_movw_eflags_T0_io();
3564 gen_op_movl_eflags_T0();
3566 gen_op_movw_eflags_T0();
3571 s
->cc_op
= CC_OP_EFLAGS
;
3572 /* abort translation because TF flag may change */
3573 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3577 case 0x9e: /* sahf */
3578 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
3579 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3580 gen_op_set_cc_op(s
->cc_op
);
3581 gen_op_movb_eflags_T0();
3582 s
->cc_op
= CC_OP_EFLAGS
;
3584 case 0x9f: /* lahf */
3585 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3586 gen_op_set_cc_op(s
->cc_op
);
3587 gen_op_movl_T0_eflags();
3588 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
3590 case 0xf5: /* cmc */
3591 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3592 gen_op_set_cc_op(s
->cc_op
);
3594 s
->cc_op
= CC_OP_EFLAGS
;
3596 case 0xf8: /* clc */
3597 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3598 gen_op_set_cc_op(s
->cc_op
);
3600 s
->cc_op
= CC_OP_EFLAGS
;
3602 case 0xf9: /* stc */
3603 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3604 gen_op_set_cc_op(s
->cc_op
);
3606 s
->cc_op
= CC_OP_EFLAGS
;
3608 case 0xfc: /* cld */
3611 case 0xfd: /* std */
3615 /************************/
3616 /* bit operations */
3617 case 0x1ba: /* bt/bts/btr/btc Gv, im */
3618 ot
= dflag
? OT_LONG
: OT_WORD
;
3619 modrm
= ldub_code(s
->pc
++);
3620 op
= (modrm
>> 3) & 7;
3621 mod
= (modrm
>> 6) & 3;
3624 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3625 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3627 gen_op_mov_TN_reg
[ot
][0][rm
]();
3630 val
= ldub_code(s
->pc
++);
3631 gen_op_movl_T1_im(val
);
3635 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
3636 s
->cc_op
= CC_OP_SARB
+ ot
;
3639 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3641 gen_op_mov_reg_T0
[ot
][rm
]();
3642 gen_op_update_bt_cc();
3645 case 0x1a3: /* bt Gv, Ev */
3648 case 0x1ab: /* bts */
3651 case 0x1b3: /* btr */
3654 case 0x1bb: /* btc */
3657 ot
= dflag
? OT_LONG
: OT_WORD
;
3658 modrm
= ldub_code(s
->pc
++);
3659 reg
= (modrm
>> 3) & 7;
3660 mod
= (modrm
>> 6) & 3;
3662 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
3664 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3665 /* specific case: we need to add a displacement */
3667 gen_op_add_bitw_A0_T1();
3669 gen_op_add_bitl_A0_T1();
3670 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3672 gen_op_mov_TN_reg
[ot
][0][rm
]();
3674 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
3675 s
->cc_op
= CC_OP_SARB
+ ot
;
3678 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3680 gen_op_mov_reg_T0
[ot
][rm
]();
3681 gen_op_update_bt_cc();
3684 case 0x1bc: /* bsf */
3685 case 0x1bd: /* bsr */
3686 ot
= dflag
? OT_LONG
: OT_WORD
;
3687 modrm
= ldub_code(s
->pc
++);
3688 reg
= (modrm
>> 3) & 7;
3689 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3690 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
3691 /* NOTE: we always write back the result. Intel doc says it is
3692 undefined if T0 == 0 */
3693 gen_op_mov_reg_T0
[ot
][reg
]();
3694 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3696 /************************/
3698 case 0x27: /* daa */
3699 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3700 gen_op_set_cc_op(s
->cc_op
);
3702 s
->cc_op
= CC_OP_EFLAGS
;
3704 case 0x2f: /* das */
3705 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3706 gen_op_set_cc_op(s
->cc_op
);
3708 s
->cc_op
= CC_OP_EFLAGS
;
3710 case 0x37: /* aaa */
3711 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3712 gen_op_set_cc_op(s
->cc_op
);
3714 s
->cc_op
= CC_OP_EFLAGS
;
3716 case 0x3f: /* aas */
3717 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3718 gen_op_set_cc_op(s
->cc_op
);
3720 s
->cc_op
= CC_OP_EFLAGS
;
3722 case 0xd4: /* aam */
3723 val
= ldub_code(s
->pc
++);
3725 s
->cc_op
= CC_OP_LOGICB
;
3727 case 0xd5: /* aad */
3728 val
= ldub_code(s
->pc
++);
3730 s
->cc_op
= CC_OP_LOGICB
;
3732 /************************/
3734 case 0x90: /* nop */
3736 case 0x9b: /* fwait */
3738 case 0xcc: /* int3 */
3739 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3741 case 0xcd: /* int N */
3742 val
= ldub_code(s
->pc
++);
3743 if (s
->vm86
&& s
->iopl
!= 3) {
3744 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3746 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3749 case 0xce: /* into */
3750 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3751 gen_op_set_cc_op(s
->cc_op
);
3752 gen_op_into(s
->pc
- s
->cs_base
);
3754 case 0xf1: /* icebp (undocumented, exits to external debugger) */
3755 gen_debug(s
, pc_start
- s
->cs_base
);
3757 case 0xfa: /* cli */
3759 if (s
->cpl
<= s
->iopl
) {
3762 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3768 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3772 case 0xfb: /* sti */
3774 if (s
->cpl
<= s
->iopl
) {
3777 /* interruptions are enabled only the first insn after sti */
3778 /* If several instructions disable interrupts, only the
3780 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3781 gen_op_set_inhibit_irq();
3782 /* give a chance to handle pending irqs */
3783 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3786 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3792 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3796 case 0x62: /* bound */
3797 ot
= dflag
? OT_LONG
: OT_WORD
;
3798 modrm
= ldub_code(s
->pc
++);
3799 reg
= (modrm
>> 3) & 7;
3800 mod
= (modrm
>> 6) & 3;
3803 gen_op_mov_reg_T0
[ot
][reg
]();
3804 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3806 gen_op_boundw(pc_start
- s
->cs_base
);
3808 gen_op_boundl(pc_start
- s
->cs_base
);
3810 case 0x1c8 ... 0x1cf: /* bswap reg */
3812 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
3814 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
3816 case 0xd6: /* salc */
3817 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3818 gen_op_set_cc_op(s
->cc_op
);
3821 case 0xe0: /* loopnz */
3822 case 0xe1: /* loopz */
3823 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3824 gen_op_set_cc_op(s
->cc_op
);
3826 case 0xe2: /* loop */
3827 case 0xe3: /* jecxz */
3828 val
= (int8_t)insn_get(s
, OT_BYTE
);
3829 next_eip
= s
->pc
- s
->cs_base
;
3833 gen_op_loop
[s
->aflag
][b
& 3](val
, next_eip
);
3836 case 0x130: /* wrmsr */
3837 case 0x132: /* rdmsr */
3839 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3847 case 0x131: /* rdtsc */
3850 case 0x1a2: /* cpuid */
3853 case 0xf4: /* hlt */
3855 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3857 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3858 gen_op_set_cc_op(s
->cc_op
);
3859 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3865 modrm
= ldub_code(s
->pc
++);
3866 mod
= (modrm
>> 6) & 3;
3867 op
= (modrm
>> 3) & 7;
3870 if (!s
->pe
|| s
->vm86
)
3872 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
3876 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3879 if (!s
->pe
|| s
->vm86
)
3882 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3884 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3885 gen_op_jmp_im(pc_start
- s
->cs_base
);
3890 if (!s
->pe
|| s
->vm86
)
3892 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
3896 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3899 if (!s
->pe
|| s
->vm86
)
3902 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3904 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3905 gen_op_jmp_im(pc_start
- s
->cs_base
);
3911 if (!s
->pe
|| s
->vm86
)
3913 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3914 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3915 gen_op_set_cc_op(s
->cc_op
);
3920 s
->cc_op
= CC_OP_EFLAGS
;
3927 modrm
= ldub_code(s
->pc
++);
3928 mod
= (modrm
>> 6) & 3;
3929 op
= (modrm
>> 3) & 7;
3935 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3937 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.limit
));
3939 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.limit
));
3940 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
3941 gen_op_addl_A0_im(2);
3943 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.base
));
3945 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.base
));
3947 gen_op_andl_T0_im(0xffffff);
3948 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
3955 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3957 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3958 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
3959 gen_op_addl_A0_im(2);
3960 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
3962 gen_op_andl_T0_im(0xffffff);
3964 gen_op_movl_env_T0(offsetof(CPUX86State
,gdt
.base
));
3965 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
3967 gen_op_movl_env_T0(offsetof(CPUX86State
,idt
.base
));
3968 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
3973 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
3974 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
3978 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3980 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3982 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3986 case 7: /* invlpg */
3988 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3992 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3994 gen_op_jmp_im(s
->pc
- s
->cs_base
);
4002 case 0x108: /* invd */
4003 case 0x109: /* wbinvd */
4005 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4010 case 0x63: /* arpl */
4011 if (!s
->pe
|| s
->vm86
)
4013 ot
= dflag
? OT_LONG
: OT_WORD
;
4014 modrm
= ldub_code(s
->pc
++);
4015 reg
= (modrm
>> 3) & 7;
4016 mod
= (modrm
>> 6) & 3;
4019 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4020 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4022 gen_op_mov_TN_reg
[ot
][0][rm
]();
4024 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4025 gen_op_set_cc_op(s
->cc_op
);
4027 s
->cc_op
= CC_OP_EFLAGS
;
4029 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4031 gen_op_mov_reg_T0
[ot
][rm
]();
4033 gen_op_arpl_update();
4035 case 0x102: /* lar */
4036 case 0x103: /* lsl */
4037 if (!s
->pe
|| s
->vm86
)
4039 ot
= dflag
? OT_LONG
: OT_WORD
;
4040 modrm
= ldub_code(s
->pc
++);
4041 reg
= (modrm
>> 3) & 7;
4042 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4043 gen_op_mov_TN_reg
[ot
][1][reg
]();
4044 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4045 gen_op_set_cc_op(s
->cc_op
);
4050 s
->cc_op
= CC_OP_EFLAGS
;
4051 gen_op_mov_reg_T1
[ot
][reg
]();
4054 modrm
= ldub_code(s
->pc
++);
4055 mod
= (modrm
>> 6) & 3;
4056 op
= (modrm
>> 3) & 7;
4058 case 0: /* prefetchnta */
4059 case 1: /* prefetchnt0 */
4060 case 2: /* prefetchnt0 */
4061 case 3: /* prefetchnt0 */
4064 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4065 /* nothing more to do */
4071 case 0x120: /* mov reg, crN */
4072 case 0x122: /* mov crN, reg */
4074 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4076 modrm
= ldub_code(s
->pc
++);
4077 if ((modrm
& 0xc0) != 0xc0)
4080 reg
= (modrm
>> 3) & 7;
4087 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
4088 gen_op_movl_crN_T0(reg
);
4089 gen_op_jmp_im(s
->pc
- s
->cs_base
);
4092 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
4093 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
4101 case 0x121: /* mov reg, drN */
4102 case 0x123: /* mov drN, reg */
4104 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4106 modrm
= ldub_code(s
->pc
++);
4107 if ((modrm
& 0xc0) != 0xc0)
4110 reg
= (modrm
>> 3) & 7;
4111 /* XXX: do it dynamically with CR4.DE bit */
4112 if (reg
== 4 || reg
== 5)
4115 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
4116 gen_op_movl_drN_T0(reg
);
4117 gen_op_jmp_im(s
->pc
- s
->cs_base
);
4120 gen_op_movl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
4121 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
4125 case 0x106: /* clts */
4127 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4135 /* lock generation */
4136 if (s
->prefix
& PREFIX_LOCK
)
4140 /* XXX: ensure that no lock was generated */
4141 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
4145 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
4146 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
4148 /* flags read by an operation */
4149 static uint16_t opc_read_flags
[NB_OPS
] = {
4150 [INDEX_op_aas
] = CC_A
,
4151 [INDEX_op_aaa
] = CC_A
,
4152 [INDEX_op_das
] = CC_A
| CC_C
,
4153 [INDEX_op_daa
] = CC_A
| CC_C
,
4155 /* subtle: due to the incl/decl implementation, C is used */
4156 [INDEX_op_update_inc_cc
] = CC_C
,
4158 [INDEX_op_into
] = CC_O
,
4160 [INDEX_op_jb_subb
] = CC_C
,
4161 [INDEX_op_jb_subw
] = CC_C
,
4162 [INDEX_op_jb_subl
] = CC_C
,
4164 [INDEX_op_jz_subb
] = CC_Z
,
4165 [INDEX_op_jz_subw
] = CC_Z
,
4166 [INDEX_op_jz_subl
] = CC_Z
,
4168 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
4169 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
4170 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
4172 [INDEX_op_js_subb
] = CC_S
,
4173 [INDEX_op_js_subw
] = CC_S
,
4174 [INDEX_op_js_subl
] = CC_S
,
4176 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
4177 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
4178 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
4180 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
4181 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
4182 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
4184 [INDEX_op_loopnzw
] = CC_Z
,
4185 [INDEX_op_loopnzl
] = CC_Z
,
4186 [INDEX_op_loopzw
] = CC_Z
,
4187 [INDEX_op_loopzl
] = CC_Z
,
4189 [INDEX_op_seto_T0_cc
] = CC_O
,
4190 [INDEX_op_setb_T0_cc
] = CC_C
,
4191 [INDEX_op_setz_T0_cc
] = CC_Z
,
4192 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
4193 [INDEX_op_sets_T0_cc
] = CC_S
,
4194 [INDEX_op_setp_T0_cc
] = CC_P
,
4195 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
4196 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
4198 [INDEX_op_setb_T0_subb
] = CC_C
,
4199 [INDEX_op_setb_T0_subw
] = CC_C
,
4200 [INDEX_op_setb_T0_subl
] = CC_C
,
4202 [INDEX_op_setz_T0_subb
] = CC_Z
,
4203 [INDEX_op_setz_T0_subw
] = CC_Z
,
4204 [INDEX_op_setz_T0_subl
] = CC_Z
,
4206 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
4207 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
4208 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
4210 [INDEX_op_sets_T0_subb
] = CC_S
,
4211 [INDEX_op_sets_T0_subw
] = CC_S
,
4212 [INDEX_op_sets_T0_subl
] = CC_S
,
4214 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
4215 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
4216 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
4218 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
4219 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
4220 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
4222 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
4223 [INDEX_op_cmc
] = CC_C
,
4224 [INDEX_op_salc
] = CC_C
,
4226 #define DEF_READF(SUFFIX)\
4227 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4228 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4229 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
4230 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4231 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4232 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
4234 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4235 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4236 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
4237 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
4238 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
4239 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,
4244 #ifndef CONFIG_USER_ONLY
/*
 * opc_write_flags[op]: mask of x86 status flags each micro-op WRITES.
 * optimize_flags() uses this to kill dead flags and to decide when an
 * op can be replaced by its opc_simpler[] equivalent.
 * NOTE(review): garbled extraction — leading decimals are original
 * line numbers, and the DEF_WRITEF(...) instantiations plus the
 * closing "};" are missing from this view. Code left byte-identical.
 */
4250 /* flags written by an operation */
4251 static uint16_t opc_write_flags
[NB_OPS
] = {
/* Generic cc-update ops clobber all six status flags. */
4252 [INDEX_op_update2_cc
] = CC_OSZAPC
,
4253 [INDEX_op_update1_cc
] = CC_OSZAPC
,
4254 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
4255 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
4256 /* subtle: due to the incl/decl implementation, C is used */
4257 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
4258 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
/* Multiplies rewrite the whole flag set. */
4260 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
4261 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
4262 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
4263 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
4264 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
4265 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
4266 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
4267 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
/* BCD ops. */
4270 [INDEX_op_aam
] = CC_OSZAPC
,
4271 [INDEX_op_aad
] = CC_OSZAPC
,
4272 [INDEX_op_aas
] = CC_OSZAPC
,
4273 [INDEX_op_aaa
] = CC_OSZAPC
,
4274 [INDEX_op_das
] = CC_OSZAPC
,
4275 [INDEX_op_daa
] = CC_OSZAPC
,
/* EFLAGS loads: the byte form (sahf-style) cannot touch OF. */
4277 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
4278 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
4279 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
4280 [INDEX_op_movw_eflags_T0_io
] = CC_OSZAPC
,
4281 [INDEX_op_movl_eflags_T0_io
] = CC_OSZAPC
,
4282 [INDEX_op_movw_eflags_T0_cpl0
] = CC_OSZAPC
,
4283 [INDEX_op_movl_eflags_T0_cpl0
] = CC_OSZAPC
,
4284 [INDEX_op_clc
] = CC_C
,
4285 [INDEX_op_stc
] = CC_C
,
4286 [INDEX_op_cmc
] = CC_C
,
/* Bit-test ops. */
4288 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
4289 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
4290 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
4291 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
4292 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
4293 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
4294 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
4295 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
/* Bit-scan ops. */
4297 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
4298 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
4299 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
4300 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
4302 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
4303 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
4304 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
/* These write only ZF (cmpxchg8b/lar/lsl) or ZF|PF|CF (fcomi). */
4306 [INDEX_op_cmpxchg8b
] = CC_Z
,
4307 [INDEX_op_lar
] = CC_Z
,
4308 [INDEX_op_lsl
] = CC_Z
,
4309 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
4310 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
/*
 * DEF_WRITEF stamps out per-memory-suffix entries: adc/sbb, shifts,
 * double shifts and cmpxchg write all flags; rotates write only
 * OF and CF. NOTE(review): the DEF_WRITEF(...) instantiations that
 * the #ifndef below guarded are missing from this extract.
 */
4312 #define DEF_WRITEF(SUFFIX)\
4313 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4314 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4315 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4316 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4317 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4318 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4320 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4321 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4322 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4323 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4324 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4325 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4327 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4328 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4329 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4330 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4331 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4332 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
4334 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4335 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4336 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4338 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4339 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4340 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4342 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4343 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4344 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
4346 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4347 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4348 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4349 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4351 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4352 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
4353 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4354 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
4356 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
4357 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
4358 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,
4363 #ifndef CONFIG_USER_ONLY
/*
 * opc_simpler[op]: a cheaper, flag-free replacement for op, used by
 * optimize_flags() when none of op's written flags are live. Entries
 * left at 0 are given a default in optimize_flags_init().
 * NOTE(review): garbled extraction — leading decimals are original
 * line numbers; the closing "};" and some DEF_SIMPLER instantiations
 * are missing from this view. Code left byte-identical.
 */
4369 /* simpler form of an operation if no flags need to be generated */
4370 static uint16_t opc_simpler
[NB_OPS
] = {
/* Pure flag-update ops become nops when their flags are dead. */
4371 [INDEX_op_update2_cc
] = INDEX_op_nop
,
4372 [INDEX_op_update1_cc
] = INDEX_op_nop
,
4373 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
4375 /* broken: CC_OP logic must be rewritten */
4376 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
/* Shifts: _cc variants degrade to their flagless twins. */
4379 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
4380 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
4381 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
4383 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
4384 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
4385 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
4387 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
4388 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
4389 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
/* DEF_SIMPLER: same degradation for rotates, per memory suffix. */
4391 #define DEF_SIMPLER(SUFFIX)\
4392 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
4393 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
4394 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
4396 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
4397 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
4398 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,
/* Softmmu builds also instantiate the _kernel (and, presumably in the
   missing lines, _user) access variants — TODO confirm against the
   original file. */
4402 #ifndef CONFIG_USER_ONLY
4403 DEF_SIMPLER(_kernel
)
/*
 * optimize_flags_init(): one-time setup for the flag-liveness
 * optimizer. Scans all NB_OPS entries of opc_simpler[] and fills in
 * a default for every entry still 0 (the assignment inside the if,
 * and the function's braces, are among the lines missing from this
 * garbled extract — presumably each op defaults to itself; TODO
 * confirm against the original file).
 */
4408 void optimize_flags_init(void)
4411 /* put default values in arrays */
4412 for(i
= 0; i
< NB_OPS
; i
++) {
4413 if (opc_simpler
[i
] == 0)
/*
 * optimize_flags(): backward flag-liveness pass over one translated
 * block.
 *   opc_buf     - start of the micro-op buffer
 *   opc_buf_len - number of ops in the buffer
 * Starting from the block end with all six status flags assumed live
 * (CC_OSZAPC), it walks ops in reverse; an op whose written flags are
 * all dead is rewritten in place to its opc_simpler[] form, then
 * liveness is updated as live = (live & ~written) | read.
 * NOTE(review): garbled extract — the opc_ptr declaration, the
 * `op = *--opc_ptr;` fetch and closing braces are missing here;
 * code lines below are left byte-identical.
 */
4418 /* CPU flags computation optimization: we move backward thru the
4419 generated code to see which flags are needed. The operation is
4420 modified if suitable */
4421 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
4424 int live_flags
, write_flags
, op
;
4426 opc_ptr
= opc_buf
+ opc_buf_len
;
4427 /* live_flags contains the flags needed by the next instructions
4428 in the code. At the end of the bloc, we consider that all the
4430 live_flags
= CC_OSZAPC
;
4431 while (opc_ptr
> opc_buf
) {
4433 /* if none of the flags written by the instruction is used,
4434 then we can try to find a simpler instruction */
4435 write_flags
= opc_write_flags
[op
];
4436 if ((live_flags
& write_flags
) == 0) {
4437 *opc_ptr
= opc_simpler
[op
];
4439 /* compute the live flags before the instruction */
4440 live_flags
&= ~write_flags
;
4441 live_flags
|= opc_read_flags
[op
];
/*
 * Core translation loop: turns the guest x86 code of one
 * TranslationBlock into micro-ops in gen_opc_buf/gen_opparam_buf.
 *   env       - CPU state (breakpoints, single-step flag)
 *   tb        - block being translated (pc, cs_base, flags; tb->size
 *               is filled in at the end)
 *   search_pc - when true, also records per-op pc/cc_op metadata in
 *               gen_opc_pc/gen_opc_cc_op/gen_opc_instr_start
 * NOTE(review): this is a garbled extract — leading decimals are the
 * original file's line numbers and many lines (declarations, braces,
 * loop headers, #ifdef DEBUG guards around the logfile dumps) are
 * missing. Code lines are left byte-identical; comments only added.
 */
4445 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
4446 basic block 'tb'. If search_pc is TRUE, also generate PC
4447 information for each intermediate instruction. */
4448 static inline int gen_intermediate_code_internal(CPUState
*env
,
4449 TranslationBlock
*tb
,
4452 DisasContext dc1
, *dc
= &dc1
;
4454 uint16_t *gen_opc_end
;
4459 /* generate intermediate code */
4460 pc_start
= (uint8_t *)tb
->pc
;
4461 cs_base
= (uint8_t *)tb
->cs_base
;
/* Unpack the tb flags word into the per-block DisasContext fields
   (protected mode, code/stack size, segment bases, vm86, CPL, IOPL,
   trap flag). */
4464 dc
->pe
= (flags
>> HF_PE_SHIFT
) & 1;
4465 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
4466 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
4467 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
4469 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
4470 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
4471 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
4472 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
4473 dc
->singlestep_enabled
= env
->singlestep_enabled
;
/* Flag state starts unknown; it is tracked lazily during disas. */
4474 dc
->cc_op
= CC_OP_DYNAMIC
;
4475 dc
->cs_base
= cs_base
;
4477 dc
->popl_esp_hack
= 0;
4478 /* select memory access functions */
4480 if (flags
& HF_SOFTMMU_MASK
) {
/* Direct block chaining is only allowed when nothing needs to
   observe each instruction (no TF, no debugger single-step, no
   pending IRQ-inhibit; softmmu also disables it without
   CONFIG_SOFTMMU). */
4486 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
||
4487 (flags
& HF_INHIBIT_IRQ_MASK
)
4488 #ifndef CONFIG_SOFTMMU
4489 || (flags
& HF_SOFTMMU_MASK
)
4493 /* check addseg logic */
4494 if (!dc
->addseg
&& (dc
->vm86
|| !dc
->pe
))
4495 printf("ERROR addseg\n");
/* Reset the micro-op output buffers for this block. */
4498 gen_opc_ptr
= gen_opc_buf
;
4499 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4500 gen_opparam_ptr
= gen_opparam_buf
;
4502 dc
->is_jmp
= DISAS_NEXT
;
/* Main loop body (loop header missing from this extract): emit a
   debug trap if the current pc matches a CPU breakpoint. */
4507 if (env
->nb_breakpoints
> 0) {
4508 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4509 if (env
->breakpoints
[j
] == (unsigned long)pc_ptr
) {
4510 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
/* search_pc bookkeeping: pad instr_start up to the current op index,
   then record pc and cc_op for this guest instruction. */
4516 j
= gen_opc_ptr
- gen_opc_buf
;
4520 gen_opc_instr_start
[lj
++] = 0;
4522 gen_opc_pc
[lj
] = (uint32_t)pc_ptr
;
4523 gen_opc_cc_op
[lj
] = dc
->cc_op
;
4524 gen_opc_instr_start
[lj
] = 1;
/* Translate one guest instruction; returns the next pc. */
4526 pc_ptr
= disas_insn(dc
, pc_ptr
);
4527 /* stop translation if indicated */
4530 /* if single step mode, we generate only one instruction and
4531 generate an exception */
4532 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
4533 the flag and abort the translation to give the irqs a
4534 change to be happen */
4535 if (dc
->tf
|| dc
->singlestep_enabled
||
4536 (flags
& HF_INHIBIT_IRQ_MASK
)) {
4537 gen_op_jmp_im(pc_ptr
- dc
->cs_base
);
4541 /* if too long translation, stop generation too */
4542 if (gen_opc_ptr
>= gen_opc_end
||
4543 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
4544 gen_op_jmp_im(pc_ptr
- dc
->cs_base
);
/* Terminate the op stream. */
4549 *gen_opc_ptr
= INDEX_op_end
;
4550 /* we don't forget to fill the last values */
4552 j
= gen_opc_ptr
- gen_opc_buf
;
4555 gen_opc_instr_start
[lj
++] = 0;
/* Debug dump of the guest input and the generated ops (presumably
   guarded by loglevel checks in the missing lines — TODO confirm). */
4560 fprintf(logfile
, "----------------\n");
4561 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4562 disas(logfile
, pc_start
, pc_ptr
- pc_start
, 0, !dc
->code32
);
4563 fprintf(logfile
, "\n");
4565 fprintf(logfile
, "OP:\n");
4566 dump_ops(gen_opc_buf
, gen_opparam_buf
);
4567 fprintf(logfile
, "\n");
4572 /* optimize flag computations */
4573 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
4577 fprintf(logfile
, "AFTER FLAGS OPT:\n");
4578 dump_ops(gen_opc_buf
, gen_opparam_buf
);
4579 fprintf(logfile
, "\n");
/* Record how many guest bytes this block covers. */
4583 tb
->size
= pc_ptr
- pc_start
;
/* Public entry point: translate tb without per-op pc metadata
   (search_pc = 0). Thin wrapper over the internal worker above. */
4587 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
4589 return gen_intermediate_code_internal(env
, tb
, 0);
4592 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
4594 return gen_intermediate_code_internal(env
, tb
, 1);