4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
33 /* XXX: move that elsewhere */
/* Write cursors into the micro-op output streams produced by the
   translator: gen_opc_ptr walks the opcode buffer, gen_opparam_ptr the
   parallel operand/parameter buffer.  NOTE(review): exact buffer
   ownership is not visible in this chunk — presumably set up by the
   caller of the translation loop; confirm against cpu-exec/translate-all. */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;
/* Bitmask flags recording which x86 instruction prefixes have been seen
   while decoding the current instruction (accumulated into 'prefixes'
   in disas_insn and tested with '&'). */
#define PREFIX_REPZ   0x01  /* F3: REP/REPE/REPZ */
#define PREFIX_REPNZ  0x02  /* F2: REPNE/REPNZ */
#define PREFIX_LOCK   0x04  /* F0: LOCK */
#define PREFIX_DATA   0x08  /* 66: operand-size override */
#define PREFIX_ADR    0x10  /* 67: address-size override */
43 typedef struct DisasContext
{
44 /* current insn context */
45 int override
; /* -1 if no override */
48 uint8_t *pc
; /* pc = eip + cs_base */
49 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
50 static state change (stop translation) */
51 /* current block context */
52 uint8_t *cs_base
; /* base of CS segment */
53 int pe
; /* protected mode */
54 int code32
; /* 32 bit code segment */
55 int ss32
; /* 32 bit stack segment */
56 int cc_op
; /* current CC operation */
57 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
58 int f_st
; /* currently unused */
59 int vm86
; /* vm86 mode */
62 int tf
; /* TF cpu flag */
63 int singlestep_enabled
; /* "hardware" single step enabled */
64 int jmp_opt
; /* use direct block chaining for direct jumps */
65 int mem_index
; /* select memory access functions */
66 struct TranslationBlock
*tb
;
67 int popl_esp_hack
; /* for correct popl with esp base handling */
/* Forward declarations: gen_eob() emits a generic end-of-translation-block
   sequence, gen_jmp() emits a (possibly chained) jump to 'eip'; both are
   defined later in this file and needed by the helpers above them. */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, unsigned int eip);
73 /* i386 arith/logic operations */
93 OP_SHL1
, /* undocumented */
98 #define DEF(s, n, copy_size) INDEX_op_ ## s,
115 /* I386 int registers */
116 OR_EAX
, /* MUST be even numbered */
124 OR_TMP0
, /* temporary operand register */
126 OR_A0
, /* temporary register used when doing address evaluation */
127 OR_ZERO
, /* fixed zero register */
/* Function types for the generated micro-op emitters, distinguished by
   arity: GenOpFunc takes no operand, GenOpFunc1..GenOpFunc3 take one to
   three 'long' immediate operands.  The dispatch tables below hold
   pointers to these. */
typedef void (GenOpFunc)(void);
typedef void (GenOpFunc1)(long);
typedef void (GenOpFunc2)(long, long);
typedef void (GenOpFunc3)(long, long, long);
136 static GenOpFunc
*gen_op_mov_reg_T0
[3][8] = {
169 static GenOpFunc
*gen_op_mov_reg_T1
[3][8] = {
202 static GenOpFunc
*gen_op_mov_reg_A0
[2][8] = {
225 static GenOpFunc
*gen_op_mov_TN_reg
[3][2][8] =
295 static GenOpFunc
*gen_op_movl_A0_reg
[8] = {
306 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][8] = {
318 gen_op_addl_A0_EAX_s1
,
319 gen_op_addl_A0_ECX_s1
,
320 gen_op_addl_A0_EDX_s1
,
321 gen_op_addl_A0_EBX_s1
,
322 gen_op_addl_A0_ESP_s1
,
323 gen_op_addl_A0_EBP_s1
,
324 gen_op_addl_A0_ESI_s1
,
325 gen_op_addl_A0_EDI_s1
,
328 gen_op_addl_A0_EAX_s2
,
329 gen_op_addl_A0_ECX_s2
,
330 gen_op_addl_A0_EDX_s2
,
331 gen_op_addl_A0_EBX_s2
,
332 gen_op_addl_A0_ESP_s2
,
333 gen_op_addl_A0_EBP_s2
,
334 gen_op_addl_A0_ESI_s2
,
335 gen_op_addl_A0_EDI_s2
,
338 gen_op_addl_A0_EAX_s3
,
339 gen_op_addl_A0_ECX_s3
,
340 gen_op_addl_A0_EDX_s3
,
341 gen_op_addl_A0_EBX_s3
,
342 gen_op_addl_A0_ESP_s3
,
343 gen_op_addl_A0_EBP_s3
,
344 gen_op_addl_A0_ESI_s3
,
345 gen_op_addl_A0_EDI_s3
,
349 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[2][8] = {
351 gen_op_cmovw_EAX_T1_T0
,
352 gen_op_cmovw_ECX_T1_T0
,
353 gen_op_cmovw_EDX_T1_T0
,
354 gen_op_cmovw_EBX_T1_T0
,
355 gen_op_cmovw_ESP_T1_T0
,
356 gen_op_cmovw_EBP_T1_T0
,
357 gen_op_cmovw_ESI_T1_T0
,
358 gen_op_cmovw_EDI_T1_T0
,
361 gen_op_cmovl_EAX_T1_T0
,
362 gen_op_cmovl_ECX_T1_T0
,
363 gen_op_cmovl_EDX_T1_T0
,
364 gen_op_cmovl_EBX_T1_T0
,
365 gen_op_cmovl_ESP_T1_T0
,
366 gen_op_cmovl_EBP_T1_T0
,
367 gen_op_cmovl_ESI_T1_T0
,
368 gen_op_cmovl_EDI_T1_T0
,
372 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
383 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[3][2] = {
385 gen_op_adcb_T0_T1_cc
,
386 gen_op_sbbb_T0_T1_cc
,
389 gen_op_adcw_T0_T1_cc
,
390 gen_op_sbbw_T0_T1_cc
,
393 gen_op_adcl_T0_T1_cc
,
394 gen_op_sbbl_T0_T1_cc
,
398 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3][2] = {
400 gen_op_adcb_mem_T0_T1_cc
,
401 gen_op_sbbb_mem_T0_T1_cc
,
404 gen_op_adcw_mem_T0_T1_cc
,
405 gen_op_sbbw_mem_T0_T1_cc
,
408 gen_op_adcl_mem_T0_T1_cc
,
409 gen_op_sbbl_mem_T0_T1_cc
,
413 static const int cc_op_arithb
[8] = {
424 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[3] = {
425 gen_op_cmpxchgb_T0_T1_EAX_cc
,
426 gen_op_cmpxchgw_T0_T1_EAX_cc
,
427 gen_op_cmpxchgl_T0_T1_EAX_cc
,
430 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3] = {
431 gen_op_cmpxchgb_mem_T0_T1_EAX_cc
,
432 gen_op_cmpxchgw_mem_T0_T1_EAX_cc
,
433 gen_op_cmpxchgl_mem_T0_T1_EAX_cc
,
436 static GenOpFunc
*gen_op_shift_T0_T1_cc
[3][8] = {
438 gen_op_rolb_T0_T1_cc
,
439 gen_op_rorb_T0_T1_cc
,
440 gen_op_rclb_T0_T1_cc
,
441 gen_op_rcrb_T0_T1_cc
,
442 gen_op_shlb_T0_T1_cc
,
443 gen_op_shrb_T0_T1_cc
,
444 gen_op_shlb_T0_T1_cc
,
445 gen_op_sarb_T0_T1_cc
,
448 gen_op_rolw_T0_T1_cc
,
449 gen_op_rorw_T0_T1_cc
,
450 gen_op_rclw_T0_T1_cc
,
451 gen_op_rcrw_T0_T1_cc
,
452 gen_op_shlw_T0_T1_cc
,
453 gen_op_shrw_T0_T1_cc
,
454 gen_op_shlw_T0_T1_cc
,
455 gen_op_sarw_T0_T1_cc
,
458 gen_op_roll_T0_T1_cc
,
459 gen_op_rorl_T0_T1_cc
,
460 gen_op_rcll_T0_T1_cc
,
461 gen_op_rcrl_T0_T1_cc
,
462 gen_op_shll_T0_T1_cc
,
463 gen_op_shrl_T0_T1_cc
,
464 gen_op_shll_T0_T1_cc
,
465 gen_op_sarl_T0_T1_cc
,
469 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3][8] = {
471 gen_op_rolb_mem_T0_T1_cc
,
472 gen_op_rorb_mem_T0_T1_cc
,
473 gen_op_rclb_mem_T0_T1_cc
,
474 gen_op_rcrb_mem_T0_T1_cc
,
475 gen_op_shlb_mem_T0_T1_cc
,
476 gen_op_shrb_mem_T0_T1_cc
,
477 gen_op_shlb_mem_T0_T1_cc
,
478 gen_op_sarb_mem_T0_T1_cc
,
481 gen_op_rolw_mem_T0_T1_cc
,
482 gen_op_rorw_mem_T0_T1_cc
,
483 gen_op_rclw_mem_T0_T1_cc
,
484 gen_op_rcrw_mem_T0_T1_cc
,
485 gen_op_shlw_mem_T0_T1_cc
,
486 gen_op_shrw_mem_T0_T1_cc
,
487 gen_op_shlw_mem_T0_T1_cc
,
488 gen_op_sarw_mem_T0_T1_cc
,
491 gen_op_roll_mem_T0_T1_cc
,
492 gen_op_rorl_mem_T0_T1_cc
,
493 gen_op_rcll_mem_T0_T1_cc
,
494 gen_op_rcrl_mem_T0_T1_cc
,
495 gen_op_shll_mem_T0_T1_cc
,
496 gen_op_shrl_mem_T0_T1_cc
,
497 gen_op_shll_mem_T0_T1_cc
,
498 gen_op_sarl_mem_T0_T1_cc
,
502 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[2][2] = {
504 gen_op_shldw_T0_T1_im_cc
,
505 gen_op_shrdw_T0_T1_im_cc
,
508 gen_op_shldl_T0_T1_im_cc
,
509 gen_op_shrdl_T0_T1_im_cc
,
513 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[2][2] = {
515 gen_op_shldw_T0_T1_ECX_cc
,
516 gen_op_shrdw_T0_T1_ECX_cc
,
519 gen_op_shldl_T0_T1_ECX_cc
,
520 gen_op_shrdl_T0_T1_ECX_cc
,
524 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[2][2] = {
526 gen_op_shldw_mem_T0_T1_im_cc
,
527 gen_op_shrdw_mem_T0_T1_im_cc
,
530 gen_op_shldl_mem_T0_T1_im_cc
,
531 gen_op_shrdl_mem_T0_T1_im_cc
,
535 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[2][2] = {
537 gen_op_shldw_mem_T0_T1_ECX_cc
,
538 gen_op_shrdw_mem_T0_T1_ECX_cc
,
541 gen_op_shldl_mem_T0_T1_ECX_cc
,
542 gen_op_shrdl_mem_T0_T1_ECX_cc
,
546 static GenOpFunc
*gen_op_btx_T0_T1_cc
[2][4] = {
549 gen_op_btsw_T0_T1_cc
,
550 gen_op_btrw_T0_T1_cc
,
551 gen_op_btcw_T0_T1_cc
,
555 gen_op_btsl_T0_T1_cc
,
556 gen_op_btrl_T0_T1_cc
,
557 gen_op_btcl_T0_T1_cc
,
561 static GenOpFunc
*gen_op_bsx_T0_cc
[2][2] = {
572 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 3] = {
573 gen_op_ldsb_raw_T0_A0
,
574 gen_op_ldsw_raw_T0_A0
,
576 #ifndef CONFIG_USER_ONLY
577 gen_op_ldsb_kernel_T0_A0
,
578 gen_op_ldsw_kernel_T0_A0
,
581 gen_op_ldsb_user_T0_A0
,
582 gen_op_ldsw_user_T0_A0
,
587 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 3] = {
588 gen_op_ldub_raw_T0_A0
,
589 gen_op_lduw_raw_T0_A0
,
592 #ifndef CONFIG_USER_ONLY
593 gen_op_ldub_kernel_T0_A0
,
594 gen_op_lduw_kernel_T0_A0
,
597 gen_op_ldub_user_T0_A0
,
598 gen_op_lduw_user_T0_A0
,
603 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
604 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 3] = {
605 gen_op_ldub_raw_T0_A0
,
606 gen_op_lduw_raw_T0_A0
,
607 gen_op_ldl_raw_T0_A0
,
609 #ifndef CONFIG_USER_ONLY
610 gen_op_ldub_kernel_T0_A0
,
611 gen_op_lduw_kernel_T0_A0
,
612 gen_op_ldl_kernel_T0_A0
,
614 gen_op_ldub_user_T0_A0
,
615 gen_op_lduw_user_T0_A0
,
616 gen_op_ldl_user_T0_A0
,
620 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 3] = {
621 gen_op_ldub_raw_T1_A0
,
622 gen_op_lduw_raw_T1_A0
,
623 gen_op_ldl_raw_T1_A0
,
625 #ifndef CONFIG_USER_ONLY
626 gen_op_ldub_kernel_T1_A0
,
627 gen_op_lduw_kernel_T1_A0
,
628 gen_op_ldl_kernel_T1_A0
,
630 gen_op_ldub_user_T1_A0
,
631 gen_op_lduw_user_T1_A0
,
632 gen_op_ldl_user_T1_A0
,
636 static GenOpFunc
*gen_op_st_T0_A0
[3 * 3] = {
637 gen_op_stb_raw_T0_A0
,
638 gen_op_stw_raw_T0_A0
,
639 gen_op_stl_raw_T0_A0
,
641 #ifndef CONFIG_USER_ONLY
642 gen_op_stb_kernel_T0_A0
,
643 gen_op_stw_kernel_T0_A0
,
644 gen_op_stl_kernel_T0_A0
,
646 gen_op_stb_user_T0_A0
,
647 gen_op_stw_user_T0_A0
,
648 gen_op_stl_user_T0_A0
,
652 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
656 override
= s
->override
;
659 if (s
->addseg
&& override
< 0)
662 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
663 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
665 gen_op_movl_A0_reg
[R_ESI
]();
668 /* 16 address, always override */
671 gen_op_movl_A0_reg
[R_ESI
]();
672 gen_op_andl_A0_ffff();
673 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
677 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
681 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
682 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
684 gen_op_movl_A0_reg
[R_EDI
]();
687 gen_op_movl_A0_reg
[R_EDI
]();
688 gen_op_andl_A0_ffff();
689 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
693 static GenOpFunc
*gen_op_movl_T0_Dshift
[3] = {
694 gen_op_movl_T0_Dshiftb
,
695 gen_op_movl_T0_Dshiftw
,
696 gen_op_movl_T0_Dshiftl
,
699 static GenOpFunc2
*gen_op_jz_ecx
[2] = {
704 static GenOpFunc1
*gen_op_jz_ecx_im
[2] = {
709 static GenOpFunc
*gen_op_dec_ECX
[2] = {
714 static GenOpFunc1
*gen_op_string_jnz_sub
[2][3] = {
716 gen_op_string_jnz_subb
,
717 gen_op_string_jnz_subw
,
718 gen_op_string_jnz_subl
,
721 gen_op_string_jz_subb
,
722 gen_op_string_jz_subw
,
723 gen_op_string_jz_subl
,
727 static GenOpFunc1
*gen_op_string_jnz_sub_im
[2][3] = {
729 gen_op_string_jnz_subb_im
,
730 gen_op_string_jnz_subw_im
,
731 gen_op_string_jnz_subl_im
,
734 gen_op_string_jz_subb_im
,
735 gen_op_string_jz_subw_im
,
736 gen_op_string_jz_subl_im
,
740 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
746 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
752 static GenOpFunc
*gen_op_in
[3] = {
758 static GenOpFunc
*gen_op_out
[3] = {
764 static GenOpFunc
*gen_check_io_T0
[3] = {
770 static GenOpFunc
*gen_check_io_DX
[3] = {
776 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, int cur_eip
)
778 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
779 if (s
->cc_op
!= CC_OP_DYNAMIC
)
780 gen_op_set_cc_op(s
->cc_op
);
781 gen_op_jmp_im(cur_eip
);
783 gen_check_io_DX
[ot
]();
785 gen_check_io_T0
[ot
]();
789 static inline void gen_movs(DisasContext
*s
, int ot
)
791 gen_string_movl_A0_ESI(s
);
792 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
793 gen_string_movl_A0_EDI(s
);
794 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
795 gen_op_movl_T0_Dshift
[ot
]();
797 gen_op_addl_ESI_T0();
798 gen_op_addl_EDI_T0();
800 gen_op_addw_ESI_T0();
801 gen_op_addw_EDI_T0();
805 static inline void gen_update_cc_op(DisasContext
*s
)
807 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
808 gen_op_set_cc_op(s
->cc_op
);
809 s
->cc_op
= CC_OP_DYNAMIC
;
813 static inline void gen_jz_ecx_string(DisasContext
*s
, unsigned int next_eip
)
816 gen_op_jz_ecx
[s
->aflag
]((long)s
->tb
, next_eip
);
818 /* XXX: does not work with gdbstub "ice" single step - not a
820 gen_op_jz_ecx_im
[s
->aflag
](next_eip
);
824 static inline void gen_stos(DisasContext
*s
, int ot
)
826 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
827 gen_string_movl_A0_EDI(s
);
828 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
829 gen_op_movl_T0_Dshift
[ot
]();
831 gen_op_addl_EDI_T0();
833 gen_op_addw_EDI_T0();
837 static inline void gen_lods(DisasContext
*s
, int ot
)
839 gen_string_movl_A0_ESI(s
);
840 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
841 gen_op_mov_reg_T0
[ot
][R_EAX
]();
842 gen_op_movl_T0_Dshift
[ot
]();
844 gen_op_addl_ESI_T0();
846 gen_op_addw_ESI_T0();
850 static inline void gen_scas(DisasContext
*s
, int ot
)
852 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
853 gen_string_movl_A0_EDI(s
);
854 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
855 gen_op_cmpl_T0_T1_cc();
856 gen_op_movl_T0_Dshift
[ot
]();
858 gen_op_addl_EDI_T0();
860 gen_op_addw_EDI_T0();
864 static inline void gen_cmps(DisasContext
*s
, int ot
)
866 gen_string_movl_A0_ESI(s
);
867 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
868 gen_string_movl_A0_EDI(s
);
869 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
870 gen_op_cmpl_T0_T1_cc();
871 gen_op_movl_T0_Dshift
[ot
]();
873 gen_op_addl_ESI_T0();
874 gen_op_addl_EDI_T0();
876 gen_op_addw_ESI_T0();
877 gen_op_addw_EDI_T0();
881 static inline void gen_ins(DisasContext
*s
, int ot
)
883 gen_op_in_DX_T0
[ot
]();
884 gen_string_movl_A0_EDI(s
);
885 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
886 gen_op_movl_T0_Dshift
[ot
]();
888 gen_op_addl_EDI_T0();
890 gen_op_addw_EDI_T0();
894 static inline void gen_outs(DisasContext
*s
, int ot
)
896 gen_string_movl_A0_ESI(s
);
897 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
898 gen_op_out_DX_T0
[ot
]();
899 gen_op_movl_T0_Dshift
[ot
]();
901 gen_op_addl_ESI_T0();
903 gen_op_addw_ESI_T0();
907 /* same method as Valgrind : we generate jumps to current or next
909 #define GEN_REPZ(op) \
910 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
911 unsigned int cur_eip, unsigned int next_eip) \
913 gen_update_cc_op(s); \
914 gen_jz_ecx_string(s, next_eip); \
916 gen_op_dec_ECX[s->aflag](); \
917 /* a loop would cause two single step exceptions if ECX = 1 \
918 before rep string_insn */ \
920 gen_op_jz_ecx_im[s->aflag](next_eip); \
921 gen_jmp(s, cur_eip); \
924 #define GEN_REPZ2(op) \
925 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
926 unsigned int cur_eip, \
927 unsigned int next_eip, \
930 gen_update_cc_op(s); \
931 gen_jz_ecx_string(s, next_eip); \
933 gen_op_dec_ECX[s->aflag](); \
934 gen_op_set_cc_op(CC_OP_SUBB + ot); \
936 gen_op_string_jnz_sub_im[nz][ot](next_eip); \
938 gen_op_string_jnz_sub[nz][ot]((long)s->tb); \
940 gen_op_jz_ecx_im[s->aflag](next_eip); \
941 gen_jmp(s, cur_eip); \
963 static GenOpFunc3
*gen_jcc_sub
[3][8] = {
995 static GenOpFunc2
*gen_op_loop
[2][4] = {
1010 static GenOpFunc
*gen_setcc_slow
[8] = {
1021 static GenOpFunc
*gen_setcc_sub
[3][8] = {
1024 gen_op_setb_T0_subb
,
1025 gen_op_setz_T0_subb
,
1026 gen_op_setbe_T0_subb
,
1027 gen_op_sets_T0_subb
,
1029 gen_op_setl_T0_subb
,
1030 gen_op_setle_T0_subb
,
1034 gen_op_setb_T0_subw
,
1035 gen_op_setz_T0_subw
,
1036 gen_op_setbe_T0_subw
,
1037 gen_op_sets_T0_subw
,
1039 gen_op_setl_T0_subw
,
1040 gen_op_setle_T0_subw
,
1044 gen_op_setb_T0_subl
,
1045 gen_op_setz_T0_subl
,
1046 gen_op_setbe_T0_subl
,
1047 gen_op_sets_T0_subl
,
1049 gen_op_setl_T0_subl
,
1050 gen_op_setle_T0_subl
,
1054 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1055 gen_op_fadd_ST0_FT0
,
1056 gen_op_fmul_ST0_FT0
,
1057 gen_op_fcom_ST0_FT0
,
1058 gen_op_fcom_ST0_FT0
,
1059 gen_op_fsub_ST0_FT0
,
1060 gen_op_fsubr_ST0_FT0
,
1061 gen_op_fdiv_ST0_FT0
,
1062 gen_op_fdivr_ST0_FT0
,
1065 /* NOTE the exception in "r" op ordering */
1066 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1067 gen_op_fadd_STN_ST0
,
1068 gen_op_fmul_STN_ST0
,
1071 gen_op_fsubr_STN_ST0
,
1072 gen_op_fsub_STN_ST0
,
1073 gen_op_fdivr_STN_ST0
,
1074 gen_op_fdiv_STN_ST0
,
1077 /* if d == OR_TMP0, it means memory operand (address in A0) */
1078 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1080 GenOpFunc
*gen_update_cc
;
1083 gen_op_mov_TN_reg
[ot
][0][d
]();
1085 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1090 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1091 gen_op_set_cc_op(s1
->cc_op
);
1093 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1094 gen_op_mov_reg_T0
[ot
][d
]();
1096 gen_op_arithc_mem_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1098 s1
->cc_op
= CC_OP_DYNAMIC
;
1101 gen_op_addl_T0_T1();
1102 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1103 gen_update_cc
= gen_op_update2_cc
;
1106 gen_op_subl_T0_T1();
1107 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1108 gen_update_cc
= gen_op_update2_cc
;
1114 gen_op_arith_T0_T1_cc
[op
]();
1115 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1116 gen_update_cc
= gen_op_update1_cc
;
1119 gen_op_cmpl_T0_T1_cc();
1120 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1121 gen_update_cc
= NULL
;
1124 if (op
!= OP_CMPL
) {
1126 gen_op_mov_reg_T0
[ot
][d
]();
1128 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1130 /* the flags update must happen after the memory write (precise
1131 exception support) */
1137 /* if d == OR_TMP0, it means memory operand (address in A0) */
1138 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1141 gen_op_mov_TN_reg
[ot
][0][d
]();
1143 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1144 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1145 gen_op_set_cc_op(s1
->cc_op
);
1148 s1
->cc_op
= CC_OP_INCB
+ ot
;
1151 s1
->cc_op
= CC_OP_DECB
+ ot
;
1154 gen_op_mov_reg_T0
[ot
][d
]();
1156 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1157 gen_op_update_inc_cc();
1160 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1163 gen_op_mov_TN_reg
[ot
][0][d
]();
1165 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1167 gen_op_mov_TN_reg
[ot
][1][s
]();
1168 /* for zero counts, flags are not updated, so must do it dynamically */
1169 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1170 gen_op_set_cc_op(s1
->cc_op
);
1173 gen_op_shift_T0_T1_cc
[ot
][op
]();
1175 gen_op_shift_mem_T0_T1_cc
[ot
][op
]();
1177 gen_op_mov_reg_T0
[ot
][d
]();
1178 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1181 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1183 /* currently not optimized */
1184 gen_op_movl_T1_im(c
);
1185 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1188 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1195 int mod
, rm
, code
, override
, must_add_seg
;
1197 override
= s
->override
;
1198 must_add_seg
= s
->addseg
;
1201 mod
= (modrm
>> 6) & 3;
1213 code
= ldub_code(s
->pc
++);
1214 scale
= (code
>> 6) & 3;
1215 index
= (code
>> 3) & 7;
1223 disp
= ldl_code(s
->pc
);
1230 disp
= (int8_t)ldub_code(s
->pc
++);
1234 disp
= ldl_code(s
->pc
);
1240 /* for correct popl handling with esp */
1241 if (base
== 4 && s
->popl_esp_hack
)
1242 disp
+= s
->popl_esp_hack
;
1243 gen_op_movl_A0_reg
[base
]();
1245 gen_op_addl_A0_im(disp
);
1247 gen_op_movl_A0_im(disp
);
1249 /* XXX: index == 4 is always invalid */
1250 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1251 gen_op_addl_A0_reg_sN
[scale
][index
]();
1255 if (base
== R_EBP
|| base
== R_ESP
)
1260 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1266 disp
= lduw_code(s
->pc
);
1268 gen_op_movl_A0_im(disp
);
1269 rm
= 0; /* avoid SS override */
1276 disp
= (int8_t)ldub_code(s
->pc
++);
1280 disp
= lduw_code(s
->pc
);
1286 gen_op_movl_A0_reg
[R_EBX
]();
1287 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1290 gen_op_movl_A0_reg
[R_EBX
]();
1291 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1294 gen_op_movl_A0_reg
[R_EBP
]();
1295 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1298 gen_op_movl_A0_reg
[R_EBP
]();
1299 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1302 gen_op_movl_A0_reg
[R_ESI
]();
1305 gen_op_movl_A0_reg
[R_EDI
]();
1308 gen_op_movl_A0_reg
[R_EBP
]();
1312 gen_op_movl_A0_reg
[R_EBX
]();
1316 gen_op_addl_A0_im(disp
);
1317 gen_op_andl_A0_ffff();
1321 if (rm
== 2 || rm
== 3 || rm
== 6)
1326 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1336 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1338 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1340 int mod
, rm
, opreg
, disp
;
1342 mod
= (modrm
>> 6) & 3;
1347 gen_op_mov_TN_reg
[ot
][0][reg
]();
1348 gen_op_mov_reg_T0
[ot
][rm
]();
1350 gen_op_mov_TN_reg
[ot
][0][rm
]();
1352 gen_op_mov_reg_T0
[ot
][reg
]();
1355 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1358 gen_op_mov_TN_reg
[ot
][0][reg
]();
1359 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1361 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1363 gen_op_mov_reg_T0
[ot
][reg
]();
1368 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1374 ret
= ldub_code(s
->pc
);
1378 ret
= lduw_code(s
->pc
);
1383 ret
= ldl_code(s
->pc
);
1390 static inline void gen_jcc(DisasContext
*s
, int b
, int val
, int next_eip
)
1392 TranslationBlock
*tb
;
1397 jcc_op
= (b
>> 1) & 7;
1401 /* we optimize the cmp/jcc case */
1405 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1408 /* some jumps are easy to compute */
1435 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1438 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1450 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1451 gen_op_set_cc_op(s
->cc_op
);
1454 gen_setcc_slow
[jcc_op
]();
1460 func((long)tb
, val
, next_eip
);
1462 func((long)tb
, next_eip
, val
);
1466 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1467 gen_op_set_cc_op(s
->cc_op
);
1468 s
->cc_op
= CC_OP_DYNAMIC
;
1470 gen_setcc_slow
[jcc_op
]();
1472 gen_op_jcc_im(val
, next_eip
);
1474 gen_op_jcc_im(next_eip
, val
);
1480 static void gen_setcc(DisasContext
*s
, int b
)
1486 jcc_op
= (b
>> 1) & 7;
1488 /* we optimize the cmp/jcc case */
1492 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1497 /* some jumps are easy to compute */
1515 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1518 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1526 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1527 gen_op_set_cc_op(s
->cc_op
);
1528 func
= gen_setcc_slow
[jcc_op
];
1537 /* move T0 to seg_reg and compute if the CPU state may change. Never
1538 call this function with seg_reg == R_CS */
1539 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, unsigned int cur_eip
)
1541 if (s
->pe
&& !s
->vm86
)
1542 gen_op_movl_seg_T0(seg_reg
, cur_eip
);
1544 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1545 /* abort translation because the register may have a non zero base
1546 or because ss32 may change. For R_SS, translation must always
1547 stop as a special handling must be done to disable hardware
1548 interrupts for the next instruction */
1549 if (seg_reg
== R_SS
|| (!s
->addseg
&& seg_reg
< R_FS
))
1553 /* generate a push. It depends on ss32, addseg and dflag */
1554 static void gen_push_T0(DisasContext
*s
)
1564 gen_op_pushl_ss32_T0();
1566 gen_op_pushw_ss32_T0();
1570 gen_op_pushl_ss16_T0();
1572 gen_op_pushw_ss16_T0();
1576 /* two step pop is necessary for precise exceptions */
1577 static void gen_pop_T0(DisasContext
*s
)
1587 gen_op_popl_ss32_T0();
1589 gen_op_popw_ss32_T0();
1593 gen_op_popl_ss16_T0();
1595 gen_op_popw_ss16_T0();
1599 static inline void gen_stack_update(DisasContext
*s
, int addend
)
1603 gen_op_addl_ESP_2();
1604 else if (addend
== 4)
1605 gen_op_addl_ESP_4();
1607 gen_op_addl_ESP_im(addend
);
1610 gen_op_addw_ESP_2();
1611 else if (addend
== 4)
1612 gen_op_addw_ESP_4();
1614 gen_op_addw_ESP_im(addend
);
1618 static void gen_pop_update(DisasContext
*s
)
1620 gen_stack_update(s
, 2 << s
->dflag
);
1623 static void gen_stack_A0(DisasContext
*s
)
1625 gen_op_movl_A0_ESP();
1627 gen_op_andl_A0_ffff();
1628 gen_op_movl_T1_A0();
1630 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1633 /* NOTE: wrap around in 16 bit not fully handled */
1634 static void gen_pusha(DisasContext
*s
)
1637 gen_op_movl_A0_ESP();
1638 gen_op_addl_A0_im(-16 << s
->dflag
);
1640 gen_op_andl_A0_ffff();
1641 gen_op_movl_T1_A0();
1643 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1644 for(i
= 0;i
< 8; i
++) {
1645 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
1646 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
1647 gen_op_addl_A0_im(2 << s
->dflag
);
1649 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
1652 /* NOTE: wrap around in 16 bit not fully handled */
1653 static void gen_popa(DisasContext
*s
)
1656 gen_op_movl_A0_ESP();
1658 gen_op_andl_A0_ffff();
1659 gen_op_movl_T1_A0();
1660 gen_op_addl_T1_im(16 << s
->dflag
);
1662 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1663 for(i
= 0;i
< 8; i
++) {
1664 /* ESP is not reloaded */
1666 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
1667 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
1669 gen_op_addl_A0_im(2 << s
->dflag
);
1671 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
1674 /* NOTE: wrap around in 16 bit not fully handled */
1675 /* XXX: check this */
1676 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
1678 int ot
, level1
, addend
, opsize
;
1680 ot
= s
->dflag
+ OT_WORD
;
1683 opsize
= 2 << s
->dflag
;
1685 gen_op_movl_A0_ESP();
1686 gen_op_addl_A0_im(-opsize
);
1688 gen_op_andl_A0_ffff();
1689 gen_op_movl_T1_A0();
1691 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1693 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
1694 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1697 gen_op_addl_A0_im(-opsize
);
1698 gen_op_addl_T0_im(-opsize
);
1699 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1701 gen_op_addl_A0_im(-opsize
);
1702 /* XXX: add st_T1_A0 ? */
1703 gen_op_movl_T0_T1();
1704 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1706 gen_op_mov_reg_T1
[ot
][R_EBP
]();
1707 addend
= -esp_addend
;
1709 addend
-= opsize
* (level1
+ 1);
1710 gen_op_addl_T1_im(addend
);
1711 gen_op_mov_reg_T1
[ot
][R_ESP
]();
1714 static void gen_exception(DisasContext
*s
, int trapno
, unsigned int cur_eip
)
1716 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1717 gen_op_set_cc_op(s
->cc_op
);
1718 gen_op_jmp_im(cur_eip
);
1719 gen_op_raise_exception(trapno
);
1723 /* an interrupt is different from an exception because of the
1724 priviledge checks */
1725 static void gen_interrupt(DisasContext
*s
, int intno
,
1726 unsigned int cur_eip
, unsigned int next_eip
)
1728 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1729 gen_op_set_cc_op(s
->cc_op
);
1730 gen_op_jmp_im(cur_eip
);
1731 gen_op_raise_interrupt(intno
, next_eip
);
1735 static void gen_debug(DisasContext
*s
, unsigned int cur_eip
)
1737 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1738 gen_op_set_cc_op(s
->cc_op
);
1739 gen_op_jmp_im(cur_eip
);
1744 /* generate a generic end of block. Trace exception is also generated
1746 static void gen_eob(DisasContext
*s
)
1748 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1749 gen_op_set_cc_op(s
->cc_op
);
1750 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
1751 gen_op_reset_inhibit_irq();
1753 if (s
->singlestep_enabled
) {
1756 gen_op_raise_exception(EXCP01_SSTP
);
1764 /* generate a jump to eip. No segment change must happen before as a
1765 direct call to the next block may occur */
1766 static void gen_jmp(DisasContext
*s
, unsigned int eip
)
1768 TranslationBlock
*tb
= s
->tb
;
1771 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1772 gen_op_set_cc_op(s
->cc_op
);
1773 gen_op_jmp((long)tb
, eip
);
1781 /* convert one instruction. s->is_jmp is set if the translation must
1782 be stopped. Return the next pc value */
1783 static uint8_t *disas_insn(DisasContext
*s
, uint8_t *pc_start
)
1785 int b
, prefixes
, aflag
, dflag
;
1787 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
1788 unsigned int next_eip
;
1796 b
= ldub_code(s
->pc
);
1798 /* check prefixes */
1801 prefixes
|= PREFIX_REPZ
;
1804 prefixes
|= PREFIX_REPNZ
;
1807 prefixes
|= PREFIX_LOCK
;
1828 prefixes
|= PREFIX_DATA
;
1831 prefixes
|= PREFIX_ADR
;
1835 if (prefixes
& PREFIX_DATA
)
1837 if (prefixes
& PREFIX_ADR
)
1840 s
->prefix
= prefixes
;
1844 /* lock generation */
1845 if (prefixes
& PREFIX_LOCK
)
1848 /* now check op code */
1852 /**************************/
1853 /* extended op code */
1854 b
= ldub_code(s
->pc
++) | 0x100;
1857 /**************************/
1875 ot
= dflag
? OT_LONG
: OT_WORD
;
1878 case 0: /* OP Ev, Gv */
1879 modrm
= ldub_code(s
->pc
++);
1880 reg
= ((modrm
>> 3) & 7);
1881 mod
= (modrm
>> 6) & 3;
1884 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1886 } else if (op
== OP_XORL
&& rm
== reg
) {
1888 /* xor reg, reg optimisation */
1890 s
->cc_op
= CC_OP_LOGICB
+ ot
;
1891 gen_op_mov_reg_T0
[ot
][reg
]();
1892 gen_op_update1_cc();
1897 gen_op_mov_TN_reg
[ot
][1][reg
]();
1898 gen_op(s
, op
, ot
, opreg
);
1900 case 1: /* OP Gv, Ev */
1901 modrm
= ldub_code(s
->pc
++);
1902 mod
= (modrm
>> 6) & 3;
1903 reg
= ((modrm
>> 3) & 7);
1906 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1907 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1908 } else if (op
== OP_XORL
&& rm
== reg
) {
1911 gen_op_mov_TN_reg
[ot
][1][rm
]();
1913 gen_op(s
, op
, ot
, reg
);
1915 case 2: /* OP A, Iv */
1916 val
= insn_get(s
, ot
);
1917 gen_op_movl_T1_im(val
);
1918 gen_op(s
, op
, ot
, OR_EAX
);
1924 case 0x80: /* GRP1 */
1933 ot
= dflag
? OT_LONG
: OT_WORD
;
1935 modrm
= ldub_code(s
->pc
++);
1936 mod
= (modrm
>> 6) & 3;
1938 op
= (modrm
>> 3) & 7;
1941 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1944 opreg
= rm
+ OR_EAX
;
1951 val
= insn_get(s
, ot
);
1954 val
= (int8_t)insn_get(s
, OT_BYTE
);
1957 gen_op_movl_T1_im(val
);
1958 gen_op(s
, op
, ot
, opreg
);
1962 /**************************/
1963 /* inc, dec, and other misc arith */
1964 case 0x40 ... 0x47: /* inc Gv */
1965 ot
= dflag
? OT_LONG
: OT_WORD
;
1966 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
1968 case 0x48 ... 0x4f: /* dec Gv */
1969 ot
= dflag
? OT_LONG
: OT_WORD
;
1970 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
1972 case 0xf6: /* GRP3 */
1977 ot
= dflag
? OT_LONG
: OT_WORD
;
1979 modrm
= ldub_code(s
->pc
++);
1980 mod
= (modrm
>> 6) & 3;
1982 op
= (modrm
>> 3) & 7;
1984 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1985 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1987 gen_op_mov_TN_reg
[ot
][0][rm
]();
1992 val
= insn_get(s
, ot
);
1993 gen_op_movl_T1_im(val
);
1994 gen_op_testl_T0_T1_cc();
1995 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2000 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2002 gen_op_mov_reg_T0
[ot
][rm
]();
2008 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2010 gen_op_mov_reg_T0
[ot
][rm
]();
2012 gen_op_update_neg_cc();
2013 s
->cc_op
= CC_OP_SUBB
+ ot
;
2018 gen_op_mulb_AL_T0();
2019 s
->cc_op
= CC_OP_MULB
;
2022 gen_op_mulw_AX_T0();
2023 s
->cc_op
= CC_OP_MULW
;
2027 gen_op_mull_EAX_T0();
2028 s
->cc_op
= CC_OP_MULL
;
2035 gen_op_imulb_AL_T0();
2036 s
->cc_op
= CC_OP_MULB
;
2039 gen_op_imulw_AX_T0();
2040 s
->cc_op
= CC_OP_MULW
;
2044 gen_op_imull_EAX_T0();
2045 s
->cc_op
= CC_OP_MULL
;
2052 gen_op_divb_AL_T0(pc_start
- s
->cs_base
);
2055 gen_op_divw_AX_T0(pc_start
- s
->cs_base
);
2059 gen_op_divl_EAX_T0(pc_start
- s
->cs_base
);
2066 gen_op_idivb_AL_T0(pc_start
- s
->cs_base
);
2069 gen_op_idivw_AX_T0(pc_start
- s
->cs_base
);
2073 gen_op_idivl_EAX_T0(pc_start
- s
->cs_base
);
2082 case 0xfe: /* GRP4 */
2083 case 0xff: /* GRP5 */
2087 ot
= dflag
? OT_LONG
: OT_WORD
;
2089 modrm
= ldub_code(s
->pc
++);
2090 mod
= (modrm
>> 6) & 3;
2092 op
= (modrm
>> 3) & 7;
2093 if (op
>= 2 && b
== 0xfe) {
2097 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2098 if (op
>= 2 && op
!= 3 && op
!= 5)
2099 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2101 gen_op_mov_TN_reg
[ot
][0][rm
]();
2105 case 0: /* inc Ev */
2110 gen_inc(s
, ot
, opreg
, 1);
2112 case 1: /* dec Ev */
2117 gen_inc(s
, ot
, opreg
, -1);
2119 case 2: /* call Ev */
2120 /* XXX: optimize if memory (no and is necessary) */
2122 gen_op_andl_T0_ffff();
2124 next_eip
= s
->pc
- s
->cs_base
;
2125 gen_op_movl_T0_im(next_eip
);
2129 case 3: /* lcall Ev */
2130 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2131 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2132 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2134 if (s
->pe
&& !s
->vm86
) {
2135 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2136 gen_op_set_cc_op(s
->cc_op
);
2137 gen_op_jmp_im(pc_start
- s
->cs_base
);
2138 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- s
->cs_base
);
2140 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
2144 case 4: /* jmp Ev */
2146 gen_op_andl_T0_ffff();
2150 case 5: /* ljmp Ev */
2151 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2152 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2153 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2155 if (s
->pe
&& !s
->vm86
) {
2156 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2157 gen_op_set_cc_op(s
->cc_op
);
2158 gen_op_jmp_im(pc_start
- s
->cs_base
);
2159 gen_op_ljmp_protected_T0_T1();
2161 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
2162 gen_op_movl_T0_T1();
2167 case 6: /* push Ev */
2175 case 0x84: /* test Ev, Gv */
2180 ot
= dflag
? OT_LONG
: OT_WORD
;
2182 modrm
= ldub_code(s
->pc
++);
2183 mod
= (modrm
>> 6) & 3;
2185 reg
= (modrm
>> 3) & 7;
2187 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2188 gen_op_mov_TN_reg
[ot
][1][reg
+ OR_EAX
]();
2189 gen_op_testl_T0_T1_cc();
2190 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2193 case 0xa8: /* test eAX, Iv */
2198 ot
= dflag
? OT_LONG
: OT_WORD
;
2199 val
= insn_get(s
, ot
);
2201 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
2202 gen_op_movl_T1_im(val
);
2203 gen_op_testl_T0_T1_cc();
2204 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2207 case 0x98: /* CWDE/CBW */
2209 gen_op_movswl_EAX_AX();
2211 gen_op_movsbw_AX_AL();
2213 case 0x99: /* CDQ/CWD */
2215 gen_op_movslq_EDX_EAX();
2217 gen_op_movswl_DX_AX();
2219 case 0x1af: /* imul Gv, Ev */
2220 case 0x69: /* imul Gv, Ev, I */
2222 ot
= dflag
? OT_LONG
: OT_WORD
;
2223 modrm
= ldub_code(s
->pc
++);
2224 reg
= ((modrm
>> 3) & 7) + OR_EAX
;
2225 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2227 val
= insn_get(s
, ot
);
2228 gen_op_movl_T1_im(val
);
2229 } else if (b
== 0x6b) {
2230 val
= insn_get(s
, OT_BYTE
);
2231 gen_op_movl_T1_im(val
);
2233 gen_op_mov_TN_reg
[ot
][1][reg
]();
2236 if (ot
== OT_LONG
) {
2237 gen_op_imull_T0_T1();
2239 gen_op_imulw_T0_T1();
2241 gen_op_mov_reg_T0
[ot
][reg
]();
2242 s
->cc_op
= CC_OP_MULB
+ ot
;
2245 case 0x1c1: /* xadd Ev, Gv */
2249 ot
= dflag
? OT_LONG
: OT_WORD
;
2250 modrm
= ldub_code(s
->pc
++);
2251 reg
= (modrm
>> 3) & 7;
2252 mod
= (modrm
>> 6) & 3;
2255 gen_op_mov_TN_reg
[ot
][0][reg
]();
2256 gen_op_mov_TN_reg
[ot
][1][rm
]();
2257 gen_op_addl_T0_T1();
2258 gen_op_mov_reg_T0
[ot
][rm
]();
2259 gen_op_mov_reg_T1
[ot
][reg
]();
2261 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2262 gen_op_mov_TN_reg
[ot
][0][reg
]();
2263 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2264 gen_op_addl_T0_T1();
2265 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2266 gen_op_mov_reg_T1
[ot
][reg
]();
2268 gen_op_update2_cc();
2269 s
->cc_op
= CC_OP_ADDB
+ ot
;
2272 case 0x1b1: /* cmpxchg Ev, Gv */
2276 ot
= dflag
? OT_LONG
: OT_WORD
;
2277 modrm
= ldub_code(s
->pc
++);
2278 reg
= (modrm
>> 3) & 7;
2279 mod
= (modrm
>> 6) & 3;
2280 gen_op_mov_TN_reg
[ot
][1][reg
]();
2283 gen_op_mov_TN_reg
[ot
][0][rm
]();
2284 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
2285 gen_op_mov_reg_T0
[ot
][rm
]();
2287 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2288 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2289 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
]();
2291 s
->cc_op
= CC_OP_SUBB
+ ot
;
2293 case 0x1c7: /* cmpxchg8b */
2294 modrm
= ldub_code(s
->pc
++);
2295 mod
= (modrm
>> 6) & 3;
2298 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2299 gen_op_set_cc_op(s
->cc_op
);
2300 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2302 s
->cc_op
= CC_OP_EFLAGS
;
2305 /**************************/
2307 case 0x50 ... 0x57: /* push */
2308 gen_op_mov_TN_reg
[OT_LONG
][0][b
& 7]();
2311 case 0x58 ... 0x5f: /* pop */
2312 ot
= dflag
? OT_LONG
: OT_WORD
;
2314 /* NOTE: order is important for pop %sp */
2316 gen_op_mov_reg_T0
[ot
][b
& 7]();
2318 case 0x60: /* pusha */
2321 case 0x61: /* popa */
2324 case 0x68: /* push Iv */
2326 ot
= dflag
? OT_LONG
: OT_WORD
;
2328 val
= insn_get(s
, ot
);
2330 val
= (int8_t)insn_get(s
, OT_BYTE
);
2331 gen_op_movl_T0_im(val
);
2334 case 0x8f: /* pop Ev */
2335 ot
= dflag
? OT_LONG
: OT_WORD
;
2336 modrm
= ldub_code(s
->pc
++);
2337 mod
= (modrm
>> 6) & 3;
2340 /* NOTE: order is important for pop %sp */
2343 gen_op_mov_reg_T0
[ot
][rm
]();
2345 /* NOTE: order is important too for MMU exceptions */
2346 s
->popl_esp_hack
= 2 << dflag
;
2347 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2348 s
->popl_esp_hack
= 0;
2352 case 0xc8: /* enter */
2355 val
= lduw_code(s
->pc
);
2357 level
= ldub_code(s
->pc
++);
2358 gen_enter(s
, val
, level
);
2361 case 0xc9: /* leave */
2362 /* XXX: exception not precise (ESP is updated before potential exception) */
2364 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2365 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
2367 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
2368 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
2371 ot
= dflag
? OT_LONG
: OT_WORD
;
2372 gen_op_mov_reg_T0
[ot
][R_EBP
]();
2375 case 0x06: /* push es */
2376 case 0x0e: /* push cs */
2377 case 0x16: /* push ss */
2378 case 0x1e: /* push ds */
2379 gen_op_movl_T0_seg(b
>> 3);
2382 case 0x1a0: /* push fs */
2383 case 0x1a8: /* push gs */
2384 gen_op_movl_T0_seg((b
>> 3) & 7);
2387 case 0x07: /* pop es */
2388 case 0x17: /* pop ss */
2389 case 0x1f: /* pop ds */
2392 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
2395 /* if reg == SS, inhibit interrupts/trace. */
2396 /* If several instructions disable interrupts, only the
2398 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
2399 gen_op_set_inhibit_irq();
2403 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2407 case 0x1a1: /* pop fs */
2408 case 0x1a9: /* pop gs */
2410 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
2413 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2418 /**************************/
2421 case 0x89: /* mov Gv, Ev */
2425 ot
= dflag
? OT_LONG
: OT_WORD
;
2426 modrm
= ldub_code(s
->pc
++);
2427 reg
= (modrm
>> 3) & 7;
2429 /* generate a generic store */
2430 gen_ldst_modrm(s
, modrm
, ot
, OR_EAX
+ reg
, 1);
2433 case 0xc7: /* mov Ev, Iv */
2437 ot
= dflag
? OT_LONG
: OT_WORD
;
2438 modrm
= ldub_code(s
->pc
++);
2439 mod
= (modrm
>> 6) & 3;
2441 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2442 val
= insn_get(s
, ot
);
2443 gen_op_movl_T0_im(val
);
2445 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2447 gen_op_mov_reg_T0
[ot
][modrm
& 7]();
2450 case 0x8b: /* mov Ev, Gv */
2454 ot
= dflag
? OT_LONG
: OT_WORD
;
2455 modrm
= ldub_code(s
->pc
++);
2456 reg
= (modrm
>> 3) & 7;
2458 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2459 gen_op_mov_reg_T0
[ot
][reg
]();
2461 case 0x8e: /* mov seg, Gv */
2462 modrm
= ldub_code(s
->pc
++);
2463 reg
= (modrm
>> 3) & 7;
2464 if (reg
>= 6 || reg
== R_CS
)
2466 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2467 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
2469 /* if reg == SS, inhibit interrupts/trace */
2470 /* If several instructions disable interrupts, only the
2472 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
2473 gen_op_set_inhibit_irq();
2477 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2481 case 0x8c: /* mov Gv, seg */
2482 modrm
= ldub_code(s
->pc
++);
2483 reg
= (modrm
>> 3) & 7;
2484 mod
= (modrm
>> 6) & 3;
2487 gen_op_movl_T0_seg(reg
);
2489 if (mod
== 3 && dflag
)
2491 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2494 case 0x1b6: /* movzbS Gv, Eb */
2495 case 0x1b7: /* movzwS Gv, Eb */
2496 case 0x1be: /* movsbS Gv, Eb */
2497 case 0x1bf: /* movswS Gv, Eb */
2500 /* d_ot is the size of destination */
2501 d_ot
= dflag
+ OT_WORD
;
2502 /* ot is the size of source */
2503 ot
= (b
& 1) + OT_BYTE
;
2504 modrm
= ldub_code(s
->pc
++);
2505 reg
= ((modrm
>> 3) & 7) + OR_EAX
;
2506 mod
= (modrm
>> 6) & 3;
2510 gen_op_mov_TN_reg
[ot
][0][rm
]();
2511 switch(ot
| (b
& 8)) {
2513 gen_op_movzbl_T0_T0();
2516 gen_op_movsbl_T0_T0();
2519 gen_op_movzwl_T0_T0();
2523 gen_op_movswl_T0_T0();
2526 gen_op_mov_reg_T0
[d_ot
][reg
]();
2528 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2530 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
2532 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
2534 gen_op_mov_reg_T0
[d_ot
][reg
]();
2539 case 0x8d: /* lea */
2540 ot
= dflag
? OT_LONG
: OT_WORD
;
2541 modrm
= ldub_code(s
->pc
++);
2542 reg
= (modrm
>> 3) & 7;
2543 /* we must ensure that no segment is added */
2547 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2549 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
2552 case 0xa0: /* mov EAX, Ov */
2554 case 0xa2: /* mov Ov, EAX */
2559 ot
= dflag
? OT_LONG
: OT_WORD
;
2561 offset_addr
= insn_get(s
, OT_LONG
);
2563 offset_addr
= insn_get(s
, OT_WORD
);
2564 gen_op_movl_A0_im(offset_addr
);
2565 /* handle override */
2567 int override
, must_add_seg
;
2568 must_add_seg
= s
->addseg
;
2569 if (s
->override
>= 0) {
2570 override
= s
->override
;
2576 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
2580 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2581 gen_op_mov_reg_T0
[ot
][R_EAX
]();
2583 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
2584 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2587 case 0xd7: /* xlat */
2588 gen_op_movl_A0_reg
[R_EBX
]();
2589 gen_op_addl_A0_AL();
2591 gen_op_andl_A0_ffff();
2592 /* handle override */
2594 int override
, must_add_seg
;
2595 must_add_seg
= s
->addseg
;
2597 if (s
->override
>= 0) {
2598 override
= s
->override
;
2604 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
2607 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
2608 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
2610 case 0xb0 ... 0xb7: /* mov R, Ib */
2611 val
= insn_get(s
, OT_BYTE
);
2612 gen_op_movl_T0_im(val
);
2613 gen_op_mov_reg_T0
[OT_BYTE
][b
& 7]();
2615 case 0xb8 ... 0xbf: /* mov R, Iv */
2616 ot
= dflag
? OT_LONG
: OT_WORD
;
2617 val
= insn_get(s
, ot
);
2618 reg
= OR_EAX
+ (b
& 7);
2619 gen_op_movl_T0_im(val
);
2620 gen_op_mov_reg_T0
[ot
][reg
]();
2623 case 0x91 ... 0x97: /* xchg R, EAX */
2624 ot
= dflag
? OT_LONG
: OT_WORD
;
2629 case 0x87: /* xchg Ev, Gv */
2633 ot
= dflag
? OT_LONG
: OT_WORD
;
2634 modrm
= ldub_code(s
->pc
++);
2635 reg
= (modrm
>> 3) & 7;
2636 mod
= (modrm
>> 6) & 3;
2640 gen_op_mov_TN_reg
[ot
][0][reg
]();
2641 gen_op_mov_TN_reg
[ot
][1][rm
]();
2642 gen_op_mov_reg_T0
[ot
][rm
]();
2643 gen_op_mov_reg_T1
[ot
][reg
]();
2645 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2646 gen_op_mov_TN_reg
[ot
][0][reg
]();
2647 /* for xchg, lock is implicit */
2648 if (!(prefixes
& PREFIX_LOCK
))
2650 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2651 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2652 if (!(prefixes
& PREFIX_LOCK
))
2654 gen_op_mov_reg_T1
[ot
][reg
]();
2657 case 0xc4: /* les Gv */
2660 case 0xc5: /* lds Gv */
2663 case 0x1b2: /* lss Gv */
2666 case 0x1b4: /* lfs Gv */
2669 case 0x1b5: /* lgs Gv */
2672 ot
= dflag
? OT_LONG
: OT_WORD
;
2673 modrm
= ldub_code(s
->pc
++);
2674 reg
= (modrm
>> 3) & 7;
2675 mod
= (modrm
>> 6) & 3;
2678 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2679 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2680 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2681 /* load the segment first to handle exceptions properly */
2682 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2683 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
2684 /* then put the data */
2685 gen_op_mov_reg_T1
[ot
][reg
]();
2687 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2692 /************************/
2703 ot
= dflag
? OT_LONG
: OT_WORD
;
2705 modrm
= ldub_code(s
->pc
++);
2706 mod
= (modrm
>> 6) & 3;
2708 op
= (modrm
>> 3) & 7;
2711 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2714 opreg
= rm
+ OR_EAX
;
2719 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
2722 shift
= ldub_code(s
->pc
++);
2724 gen_shifti(s
, op
, ot
, opreg
, shift
);
2739 case 0x1a4: /* shld imm */
2743 case 0x1a5: /* shld cl */
2747 case 0x1ac: /* shrd imm */
2751 case 0x1ad: /* shrd cl */
2755 ot
= dflag
? OT_LONG
: OT_WORD
;
2756 modrm
= ldub_code(s
->pc
++);
2757 mod
= (modrm
>> 6) & 3;
2759 reg
= (modrm
>> 3) & 7;
2762 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2763 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2765 gen_op_mov_TN_reg
[ot
][0][rm
]();
2767 gen_op_mov_TN_reg
[ot
][1][reg
]();
2770 val
= ldub_code(s
->pc
++);
2774 gen_op_shiftd_T0_T1_im_cc
[ot
- OT_WORD
][op
](val
);
2776 gen_op_shiftd_mem_T0_T1_im_cc
[ot
- OT_WORD
][op
](val
);
2777 if (op
== 0 && ot
!= OT_WORD
)
2778 s
->cc_op
= CC_OP_SHLB
+ ot
;
2780 s
->cc_op
= CC_OP_SARB
+ ot
;
2783 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2784 gen_op_set_cc_op(s
->cc_op
);
2786 gen_op_shiftd_T0_T1_ECX_cc
[ot
- OT_WORD
][op
]();
2788 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
- OT_WORD
][op
]();
2789 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
2792 gen_op_mov_reg_T0
[ot
][rm
]();
2796 /************************/
2799 modrm
= ldub_code(s
->pc
++);
2800 mod
= (modrm
>> 6) & 3;
2802 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
2806 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2808 case 0x00 ... 0x07: /* fxxxs */
2809 case 0x10 ... 0x17: /* fixxxl */
2810 case 0x20 ... 0x27: /* fxxxl */
2811 case 0x30 ... 0x37: /* fixxx */
2818 gen_op_flds_FT0_A0();
2821 gen_op_fildl_FT0_A0();
2824 gen_op_fldl_FT0_A0();
2828 gen_op_fild_FT0_A0();
2832 gen_op_fp_arith_ST0_FT0
[op1
]();
2834 /* fcomp needs pop */
2839 case 0x08: /* flds */
2840 case 0x0a: /* fsts */
2841 case 0x0b: /* fstps */
2842 case 0x18: /* fildl */
2843 case 0x1a: /* fistl */
2844 case 0x1b: /* fistpl */
2845 case 0x28: /* fldl */
2846 case 0x2a: /* fstl */
2847 case 0x2b: /* fstpl */
2848 case 0x38: /* filds */
2849 case 0x3a: /* fists */
2850 case 0x3b: /* fistps */
2856 gen_op_flds_ST0_A0();
2859 gen_op_fildl_ST0_A0();
2862 gen_op_fldl_ST0_A0();
2866 gen_op_fild_ST0_A0();
2873 gen_op_fsts_ST0_A0();
2876 gen_op_fistl_ST0_A0();
2879 gen_op_fstl_ST0_A0();
2883 gen_op_fist_ST0_A0();
2891 case 0x0c: /* fldenv mem */
2892 gen_op_fldenv_A0(s
->dflag
);
2894 case 0x0d: /* fldcw mem */
2897 case 0x0e: /* fnstenv mem */
2898 gen_op_fnstenv_A0(s
->dflag
);
2900 case 0x0f: /* fnstcw mem */
2903 case 0x1d: /* fldt mem */
2904 gen_op_fldt_ST0_A0();
2906 case 0x1f: /* fstpt mem */
2907 gen_op_fstt_ST0_A0();
2910 case 0x2c: /* frstor mem */
2911 gen_op_frstor_A0(s
->dflag
);
2913 case 0x2e: /* fnsave mem */
2914 gen_op_fnsave_A0(s
->dflag
);
2916 case 0x2f: /* fnstsw mem */
2919 case 0x3c: /* fbld */
2920 gen_op_fbld_ST0_A0();
2922 case 0x3e: /* fbstp */
2923 gen_op_fbst_ST0_A0();
2926 case 0x3d: /* fildll */
2927 gen_op_fildll_ST0_A0();
2929 case 0x3f: /* fistpll */
2930 gen_op_fistll_ST0_A0();
2937 /* register float ops */
2941 case 0x08: /* fld sti */
2943 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
2945 case 0x09: /* fxchg sti */
2946 gen_op_fxchg_ST0_STN(opreg
);
2948 case 0x0a: /* grp d9/2 */
2956 case 0x0c: /* grp d9/4 */
2966 gen_op_fcom_ST0_FT0();
2975 case 0x0d: /* grp d9/5 */
2984 gen_op_fldl2t_ST0();
2988 gen_op_fldl2e_ST0();
2996 gen_op_fldlg2_ST0();
3000 gen_op_fldln2_ST0();
3011 case 0x0e: /* grp d9/6 */
3022 case 3: /* fpatan */
3025 case 4: /* fxtract */
3028 case 5: /* fprem1 */
3031 case 6: /* fdecstp */
3035 case 7: /* fincstp */
3040 case 0x0f: /* grp d9/7 */
3045 case 1: /* fyl2xp1 */
3051 case 3: /* fsincos */
3054 case 5: /* fscale */
3057 case 4: /* frndint */
3069 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
3070 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
3071 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
3077 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
3081 gen_op_fmov_FT0_STN(opreg
);
3082 gen_op_fp_arith_ST0_FT0
[op1
]();
3086 case 0x02: /* fcom */
3087 gen_op_fmov_FT0_STN(opreg
);
3088 gen_op_fcom_ST0_FT0();
3090 case 0x03: /* fcomp */
3091 gen_op_fmov_FT0_STN(opreg
);
3092 gen_op_fcom_ST0_FT0();
3095 case 0x15: /* da/5 */
3097 case 1: /* fucompp */
3098 gen_op_fmov_FT0_STN(1);
3099 gen_op_fucom_ST0_FT0();
3109 case 0: /* feni (287 only, just do nop here) */
3111 case 1: /* fdisi (287 only, just do nop here) */
3116 case 3: /* fninit */
3119 case 4: /* fsetpm (287 only, just do nop here) */
3125 case 0x1d: /* fucomi */
3126 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3127 gen_op_set_cc_op(s
->cc_op
);
3128 gen_op_fmov_FT0_STN(opreg
);
3129 gen_op_fucomi_ST0_FT0();
3130 s
->cc_op
= CC_OP_EFLAGS
;
3132 case 0x1e: /* fcomi */
3133 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3134 gen_op_set_cc_op(s
->cc_op
);
3135 gen_op_fmov_FT0_STN(opreg
);
3136 gen_op_fcomi_ST0_FT0();
3137 s
->cc_op
= CC_OP_EFLAGS
;
3139 case 0x2a: /* fst sti */
3140 gen_op_fmov_STN_ST0(opreg
);
3142 case 0x2b: /* fstp sti */
3143 gen_op_fmov_STN_ST0(opreg
);
3146 case 0x2c: /* fucom st(i) */
3147 gen_op_fmov_FT0_STN(opreg
);
3148 gen_op_fucom_ST0_FT0();
3150 case 0x2d: /* fucomp st(i) */
3151 gen_op_fmov_FT0_STN(opreg
);
3152 gen_op_fucom_ST0_FT0();
3155 case 0x33: /* de/3 */
3157 case 1: /* fcompp */
3158 gen_op_fmov_FT0_STN(1);
3159 gen_op_fcom_ST0_FT0();
3167 case 0x3c: /* df/4 */
3170 gen_op_fnstsw_EAX();
3176 case 0x3d: /* fucomip */
3177 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3178 gen_op_set_cc_op(s
->cc_op
);
3179 gen_op_fmov_FT0_STN(opreg
);
3180 gen_op_fucomi_ST0_FT0();
3182 s
->cc_op
= CC_OP_EFLAGS
;
3184 case 0x3e: /* fcomip */
3185 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3186 gen_op_set_cc_op(s
->cc_op
);
3187 gen_op_fmov_FT0_STN(opreg
);
3188 gen_op_fcomi_ST0_FT0();
3190 s
->cc_op
= CC_OP_EFLAGS
;
3192 case 0x10 ... 0x13: /* fcmovxx */
3196 const static uint8_t fcmov_cc
[8] = {
3202 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
3204 gen_op_fcmov_ST0_STN_T0(opreg
);
3212 /************************/
3215 case 0xa4: /* movsS */
3220 ot
= dflag
? OT_LONG
: OT_WORD
;
3222 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3223 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3229 case 0xaa: /* stosS */
3234 ot
= dflag
? OT_LONG
: OT_WORD
;
3236 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3237 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3242 case 0xac: /* lodsS */
3247 ot
= dflag
? OT_LONG
: OT_WORD
;
3248 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3249 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3254 case 0xae: /* scasS */
3259 ot
= dflag
? OT_LONG
: OT_WORD
;
3260 if (prefixes
& PREFIX_REPNZ
) {
3261 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
3262 } else if (prefixes
& PREFIX_REPZ
) {
3263 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
3266 s
->cc_op
= CC_OP_SUBB
+ ot
;
3270 case 0xa6: /* cmpsS */
3275 ot
= dflag
? OT_LONG
: OT_WORD
;
3276 if (prefixes
& PREFIX_REPNZ
) {
3277 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
3278 } else if (prefixes
& PREFIX_REPZ
) {
3279 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
3282 s
->cc_op
= CC_OP_SUBB
+ ot
;
3285 case 0x6c: /* insS */
3290 ot
= dflag
? OT_LONG
: OT_WORD
;
3291 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
3292 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3293 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3298 case 0x6e: /* outsS */
3303 ot
= dflag
? OT_LONG
: OT_WORD
;
3304 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
3305 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3306 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3312 /************************/
3319 ot
= dflag
? OT_LONG
: OT_WORD
;
3320 val
= ldub_code(s
->pc
++);
3321 gen_op_movl_T0_im(val
);
3322 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3324 gen_op_mov_reg_T1
[ot
][R_EAX
]();
3331 ot
= dflag
? OT_LONG
: OT_WORD
;
3332 val
= ldub_code(s
->pc
++);
3333 gen_op_movl_T0_im(val
);
3334 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3335 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
3343 ot
= dflag
? OT_LONG
: OT_WORD
;
3344 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
3345 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3347 gen_op_mov_reg_T1
[ot
][R_EAX
]();
3354 ot
= dflag
? OT_LONG
: OT_WORD
;
3355 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
3356 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
3357 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
3361 /************************/
3363 case 0xc2: /* ret im */
3364 val
= ldsw_code(s
->pc
);
3367 gen_stack_update(s
, val
+ (2 << s
->dflag
));
3369 gen_op_andl_T0_ffff();
3373 case 0xc3: /* ret */
3377 gen_op_andl_T0_ffff();
3381 case 0xca: /* lret im */
3382 val
= ldsw_code(s
->pc
);
3385 if (s
->pe
&& !s
->vm86
) {
3386 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3387 gen_op_set_cc_op(s
->cc_op
);
3388 gen_op_jmp_im(pc_start
- s
->cs_base
);
3389 gen_op_lret_protected(s
->dflag
, val
);
3393 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
3395 gen_op_andl_T0_ffff();
3396 /* NOTE: keeping EIP updated is not a problem in case of
3400 gen_op_addl_A0_im(2 << s
->dflag
);
3401 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
3402 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3403 /* add stack offset */
3404 gen_stack_update(s
, val
+ (4 << s
->dflag
));
3408 case 0xcb: /* lret */
3411 case 0xcf: /* iret */
3414 gen_op_iret_real(s
->dflag
);
3415 s
->cc_op
= CC_OP_EFLAGS
;
3416 } else if (s
->vm86
) {
3418 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3420 gen_op_iret_real(s
->dflag
);
3421 s
->cc_op
= CC_OP_EFLAGS
;
3424 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3425 gen_op_set_cc_op(s
->cc_op
);
3426 gen_op_jmp_im(pc_start
- s
->cs_base
);
3427 gen_op_iret_protected(s
->dflag
);
3428 s
->cc_op
= CC_OP_EFLAGS
;
3432 case 0xe8: /* call im */
3434 unsigned int next_eip
;
3435 ot
= dflag
? OT_LONG
: OT_WORD
;
3436 val
= insn_get(s
, ot
);
3437 next_eip
= s
->pc
- s
->cs_base
;
3441 gen_op_movl_T0_im(next_eip
);
3446 case 0x9a: /* lcall im */
3448 unsigned int selector
, offset
;
3450 ot
= dflag
? OT_LONG
: OT_WORD
;
3451 offset
= insn_get(s
, ot
);
3452 selector
= insn_get(s
, OT_WORD
);
3454 gen_op_movl_T0_im(selector
);
3455 gen_op_movl_T1_im(offset
);
3458 case 0xe9: /* jmp */
3459 ot
= dflag
? OT_LONG
: OT_WORD
;
3460 val
= insn_get(s
, ot
);
3461 val
+= s
->pc
- s
->cs_base
;
3466 case 0xea: /* ljmp im */
3468 unsigned int selector
, offset
;
3470 ot
= dflag
? OT_LONG
: OT_WORD
;
3471 offset
= insn_get(s
, ot
);
3472 selector
= insn_get(s
, OT_WORD
);
3474 gen_op_movl_T0_im(selector
);
3475 gen_op_movl_T1_im(offset
);
3478 case 0xeb: /* jmp Jb */
3479 val
= (int8_t)insn_get(s
, OT_BYTE
);
3480 val
+= s
->pc
- s
->cs_base
;
3485 case 0x70 ... 0x7f: /* jcc Jb */
3486 val
= (int8_t)insn_get(s
, OT_BYTE
);
3488 case 0x180 ... 0x18f: /* jcc Jv */
3490 val
= insn_get(s
, OT_LONG
);
3492 val
= (int16_t)insn_get(s
, OT_WORD
);
3495 next_eip
= s
->pc
- s
->cs_base
;
3499 gen_jcc(s
, b
, val
, next_eip
);
3502 case 0x190 ... 0x19f: /* setcc Gv */
3503 modrm
= ldub_code(s
->pc
++);
3505 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
3507 case 0x140 ... 0x14f: /* cmov Gv, Ev */
3508 ot
= dflag
? OT_LONG
: OT_WORD
;
3509 modrm
= ldub_code(s
->pc
++);
3510 reg
= (modrm
>> 3) & 7;
3511 mod
= (modrm
>> 6) & 3;
3514 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3515 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3518 gen_op_mov_TN_reg
[ot
][1][rm
]();
3520 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
3523 /************************/
3525 case 0x9c: /* pushf */
3526 if (s
->vm86
&& s
->iopl
!= 3) {
3527 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3529 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3530 gen_op_set_cc_op(s
->cc_op
);
3531 gen_op_movl_T0_eflags();
3535 case 0x9d: /* popf */
3536 if (s
->vm86
&& s
->iopl
!= 3) {
3537 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3542 gen_op_movl_eflags_T0_cpl0();
3544 gen_op_movw_eflags_T0_cpl0();
3547 if (s
->cpl
<= s
->iopl
) {
3549 gen_op_movl_eflags_T0_io();
3551 gen_op_movw_eflags_T0_io();
3555 gen_op_movl_eflags_T0();
3557 gen_op_movw_eflags_T0();
3562 s
->cc_op
= CC_OP_EFLAGS
;
3563 /* abort translation because TF flag may change */
3564 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3568 case 0x9e: /* sahf */
3569 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
3570 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3571 gen_op_set_cc_op(s
->cc_op
);
3572 gen_op_movb_eflags_T0();
3573 s
->cc_op
= CC_OP_EFLAGS
;
3575 case 0x9f: /* lahf */
3576 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3577 gen_op_set_cc_op(s
->cc_op
);
3578 gen_op_movl_T0_eflags();
3579 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
3581 case 0xf5: /* cmc */
3582 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3583 gen_op_set_cc_op(s
->cc_op
);
3585 s
->cc_op
= CC_OP_EFLAGS
;
3587 case 0xf8: /* clc */
3588 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3589 gen_op_set_cc_op(s
->cc_op
);
3591 s
->cc_op
= CC_OP_EFLAGS
;
3593 case 0xf9: /* stc */
3594 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3595 gen_op_set_cc_op(s
->cc_op
);
3597 s
->cc_op
= CC_OP_EFLAGS
;
3599 case 0xfc: /* cld */
3602 case 0xfd: /* std */
3606 /************************/
3607 /* bit operations */
3608 case 0x1ba: /* bt/bts/btr/btc Gv, im */
3609 ot
= dflag
? OT_LONG
: OT_WORD
;
3610 modrm
= ldub_code(s
->pc
++);
3611 op
= (modrm
>> 3) & 7;
3612 mod
= (modrm
>> 6) & 3;
3615 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3616 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3618 gen_op_mov_TN_reg
[ot
][0][rm
]();
3621 val
= ldub_code(s
->pc
++);
3622 gen_op_movl_T1_im(val
);
3626 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
3627 s
->cc_op
= CC_OP_SARB
+ ot
;
3630 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3632 gen_op_mov_reg_T0
[ot
][rm
]();
3633 gen_op_update_bt_cc();
3636 case 0x1a3: /* bt Gv, Ev */
3639 case 0x1ab: /* bts */
3642 case 0x1b3: /* btr */
3645 case 0x1bb: /* btc */
3648 ot
= dflag
? OT_LONG
: OT_WORD
;
3649 modrm
= ldub_code(s
->pc
++);
3650 reg
= (modrm
>> 3) & 7;
3651 mod
= (modrm
>> 6) & 3;
3653 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
3655 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3656 /* specific case: we need to add a displacement */
3658 gen_op_add_bitw_A0_T1();
3660 gen_op_add_bitl_A0_T1();
3661 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3663 gen_op_mov_TN_reg
[ot
][0][rm
]();
3665 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
3666 s
->cc_op
= CC_OP_SARB
+ ot
;
3669 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3671 gen_op_mov_reg_T0
[ot
][rm
]();
3672 gen_op_update_bt_cc();
3675 case 0x1bc: /* bsf */
3676 case 0x1bd: /* bsr */
3677 ot
= dflag
? OT_LONG
: OT_WORD
;
3678 modrm
= ldub_code(s
->pc
++);
3679 reg
= (modrm
>> 3) & 7;
3680 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3681 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
3682 /* NOTE: we always write back the result. Intel doc says it is
3683 undefined if T0 == 0 */
3684 gen_op_mov_reg_T0
[ot
][reg
]();
3685 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3687 /************************/
3689 case 0x27: /* daa */
3690 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3691 gen_op_set_cc_op(s
->cc_op
);
3693 s
->cc_op
= CC_OP_EFLAGS
;
3695 case 0x2f: /* das */
3696 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3697 gen_op_set_cc_op(s
->cc_op
);
3699 s
->cc_op
= CC_OP_EFLAGS
;
3701 case 0x37: /* aaa */
3702 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3703 gen_op_set_cc_op(s
->cc_op
);
3705 s
->cc_op
= CC_OP_EFLAGS
;
3707 case 0x3f: /* aas */
3708 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3709 gen_op_set_cc_op(s
->cc_op
);
3711 s
->cc_op
= CC_OP_EFLAGS
;
3713 case 0xd4: /* aam */
3714 val
= ldub_code(s
->pc
++);
3716 s
->cc_op
= CC_OP_LOGICB
;
3718 case 0xd5: /* aad */
3719 val
= ldub_code(s
->pc
++);
3721 s
->cc_op
= CC_OP_LOGICB
;
3723 /************************/
3725 case 0x90: /* nop */
3727 case 0x9b: /* fwait */
3729 case 0xcc: /* int3 */
3730 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3732 case 0xcd: /* int N */
3733 val
= ldub_code(s
->pc
++);
3734 if (s
->vm86
&& s
->iopl
!= 3) {
3735 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3737 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3740 case 0xce: /* into */
3741 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3742 gen_op_set_cc_op(s
->cc_op
);
3743 gen_op_into(s
->pc
- s
->cs_base
);
3745 case 0xf1: /* icebp (undocumented, exits to external debugger) */
3746 gen_debug(s
, pc_start
- s
->cs_base
);
3748 case 0xfa: /* cli */
3750 if (s
->cpl
<= s
->iopl
) {
3753 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3759 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3763 case 0xfb: /* sti */
3765 if (s
->cpl
<= s
->iopl
) {
3768 /* interruptions are enabled only the first insn after sti */
3769 /* If several instructions disable interrupts, only the
3771 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3772 gen_op_set_inhibit_irq();
3773 /* give a chance to handle pending irqs */
3774 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3777 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3783 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3787 case 0x62: /* bound */
3788 ot
= dflag
? OT_LONG
: OT_WORD
;
3789 modrm
= ldub_code(s
->pc
++);
3790 reg
= (modrm
>> 3) & 7;
3791 mod
= (modrm
>> 6) & 3;
3794 gen_op_mov_reg_T0
[ot
][reg
]();
3795 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3797 gen_op_boundw(pc_start
- s
->cs_base
);
3799 gen_op_boundl(pc_start
- s
->cs_base
);
3801 case 0x1c8 ... 0x1cf: /* bswap reg */
3803 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
3805 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
3807 case 0xd6: /* salc */
3808 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3809 gen_op_set_cc_op(s
->cc_op
);
3812 case 0xe0: /* loopnz */
3813 case 0xe1: /* loopz */
3814 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3815 gen_op_set_cc_op(s
->cc_op
);
3817 case 0xe2: /* loop */
3818 case 0xe3: /* jecxz */
3819 val
= (int8_t)insn_get(s
, OT_BYTE
);
3820 next_eip
= s
->pc
- s
->cs_base
;
3824 gen_op_loop
[s
->aflag
][b
& 3](val
, next_eip
);
3827 case 0x130: /* wrmsr */
3828 case 0x132: /* rdmsr */
3830 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3838 case 0x131: /* rdtsc */
3841 case 0x1a2: /* cpuid */
3844 case 0xf4: /* hlt */
3846 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3848 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3849 gen_op_set_cc_op(s
->cc_op
);
3850 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3856 modrm
= ldub_code(s
->pc
++);
3857 mod
= (modrm
>> 6) & 3;
3858 op
= (modrm
>> 3) & 7;
3861 if (!s
->pe
|| s
->vm86
)
3863 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
3867 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3870 if (!s
->pe
|| s
->vm86
)
3873 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3875 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3876 gen_op_jmp_im(pc_start
- s
->cs_base
);
3881 if (!s
->pe
|| s
->vm86
)
3883 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
3887 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3890 if (!s
->pe
|| s
->vm86
)
3893 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3895 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3896 gen_op_jmp_im(pc_start
- s
->cs_base
);
3902 if (!s
->pe
|| s
->vm86
)
3904 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3905 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3906 gen_op_set_cc_op(s
->cc_op
);
3911 s
->cc_op
= CC_OP_EFLAGS
;
3918 modrm
= ldub_code(s
->pc
++);
3919 mod
= (modrm
>> 6) & 3;
3920 op
= (modrm
>> 3) & 7;
3926 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3928 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.limit
));
3930 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.limit
));
3931 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
3932 gen_op_addl_A0_im(2);
3934 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.base
));
3936 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.base
));
3938 gen_op_andl_T0_im(0xffffff);
3939 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
3946 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3948 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3949 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
3950 gen_op_addl_A0_im(2);
3951 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
3953 gen_op_andl_T0_im(0xffffff);
3955 gen_op_movl_env_T0(offsetof(CPUX86State
,gdt
.base
));
3956 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
3958 gen_op_movl_env_T0(offsetof(CPUX86State
,idt
.base
));
3959 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
3964 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
3965 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
3969 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3971 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3973 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3977 case 7: /* invlpg */
3979 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3983 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3991 case 0x63: /* arpl */
3992 if (!s
->pe
|| s
->vm86
)
3994 ot
= dflag
? OT_LONG
: OT_WORD
;
3995 modrm
= ldub_code(s
->pc
++);
3996 reg
= (modrm
>> 3) & 7;
3997 mod
= (modrm
>> 6) & 3;
4000 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4001 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4003 gen_op_mov_TN_reg
[ot
][0][rm
]();
4005 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4006 gen_op_set_cc_op(s
->cc_op
);
4008 s
->cc_op
= CC_OP_EFLAGS
;
4010 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4012 gen_op_mov_reg_T0
[ot
][rm
]();
4014 gen_op_arpl_update();
4016 case 0x102: /* lar */
4017 case 0x103: /* lsl */
4018 if (!s
->pe
|| s
->vm86
)
4020 ot
= dflag
? OT_LONG
: OT_WORD
;
4021 modrm
= ldub_code(s
->pc
++);
4022 reg
= (modrm
>> 3) & 7;
4023 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4024 gen_op_mov_TN_reg
[ot
][1][reg
]();
4025 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4026 gen_op_set_cc_op(s
->cc_op
);
4031 s
->cc_op
= CC_OP_EFLAGS
;
4032 gen_op_mov_reg_T1
[ot
][reg
]();
4035 modrm
= ldub_code(s
->pc
++);
4036 mod
= (modrm
>> 6) & 3;
4037 op
= (modrm
>> 3) & 7;
4039 case 0: /* prefetchnta */
4040 case 1: /* prefetchnt0 */
4041 case 2: /* prefetchnt0 */
4042 case 3: /* prefetchnt0 */
4045 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4046 /* nothing more to do */
4052 case 0x120: /* mov reg, crN */
4053 case 0x122: /* mov crN, reg */
4055 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4057 modrm
= ldub_code(s
->pc
++);
4058 if ((modrm
& 0xc0) != 0xc0)
4061 reg
= (modrm
>> 3) & 7;
4068 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
4069 gen_op_movl_crN_T0(reg
);
4070 gen_op_jmp_im(s
->pc
- s
->cs_base
);
4073 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
4074 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
4082 case 0x121: /* mov reg, drN */
4083 case 0x123: /* mov drN, reg */
4085 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4087 modrm
= ldub_code(s
->pc
++);
4088 if ((modrm
& 0xc0) != 0xc0)
4091 reg
= (modrm
>> 3) & 7;
4092 /* XXX: do it dynamically with CR4.DE bit */
4093 if (reg
== 4 || reg
== 5)
4096 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
4097 gen_op_movl_drN_T0(reg
);
4098 gen_op_jmp_im(s
->pc
- s
->cs_base
);
4101 gen_op_movl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
4102 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
4106 case 0x106: /* clts */
4108 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4116 /* lock generation */
4117 if (s
->prefix
& PREFIX_LOCK
)
4121 /* XXX: ensure that no lock was generated */
4122 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
4126 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
4127 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
4129 /* flags read by an operation */
4130 static uint16_t opc_read_flags
[NB_OPS
] = {
4131 [INDEX_op_aas
] = CC_A
,
4132 [INDEX_op_aaa
] = CC_A
,
4133 [INDEX_op_das
] = CC_A
| CC_C
,
4134 [INDEX_op_daa
] = CC_A
| CC_C
,
4136 [INDEX_op_adcb_T0_T1_cc
] = CC_C
,
4137 [INDEX_op_adcw_T0_T1_cc
] = CC_C
,
4138 [INDEX_op_adcl_T0_T1_cc
] = CC_C
,
4139 [INDEX_op_sbbb_T0_T1_cc
] = CC_C
,
4140 [INDEX_op_sbbw_T0_T1_cc
] = CC_C
,
4141 [INDEX_op_sbbl_T0_T1_cc
] = CC_C
,
4143 [INDEX_op_adcb_mem_T0_T1_cc
] = CC_C
,
4144 [INDEX_op_adcw_mem_T0_T1_cc
] = CC_C
,
4145 [INDEX_op_adcl_mem_T0_T1_cc
] = CC_C
,
4146 [INDEX_op_sbbb_mem_T0_T1_cc
] = CC_C
,
4147 [INDEX_op_sbbw_mem_T0_T1_cc
] = CC_C
,
4148 [INDEX_op_sbbl_mem_T0_T1_cc
] = CC_C
,
4150 /* subtle: due to the incl/decl implementation, C is used */
4151 [INDEX_op_update_inc_cc
] = CC_C
,
4153 [INDEX_op_into
] = CC_O
,
4155 [INDEX_op_jb_subb
] = CC_C
,
4156 [INDEX_op_jb_subw
] = CC_C
,
4157 [INDEX_op_jb_subl
] = CC_C
,
4159 [INDEX_op_jz_subb
] = CC_Z
,
4160 [INDEX_op_jz_subw
] = CC_Z
,
4161 [INDEX_op_jz_subl
] = CC_Z
,
4163 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
4164 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
4165 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
4167 [INDEX_op_js_subb
] = CC_S
,
4168 [INDEX_op_js_subw
] = CC_S
,
4169 [INDEX_op_js_subl
] = CC_S
,
4171 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
4172 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
4173 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
4175 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
4176 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
4177 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
4179 [INDEX_op_loopnzw
] = CC_Z
,
4180 [INDEX_op_loopnzl
] = CC_Z
,
4181 [INDEX_op_loopzw
] = CC_Z
,
4182 [INDEX_op_loopzl
] = CC_Z
,
4184 [INDEX_op_seto_T0_cc
] = CC_O
,
4185 [INDEX_op_setb_T0_cc
] = CC_C
,
4186 [INDEX_op_setz_T0_cc
] = CC_Z
,
4187 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
4188 [INDEX_op_sets_T0_cc
] = CC_S
,
4189 [INDEX_op_setp_T0_cc
] = CC_P
,
4190 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
4191 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
4193 [INDEX_op_setb_T0_subb
] = CC_C
,
4194 [INDEX_op_setb_T0_subw
] = CC_C
,
4195 [INDEX_op_setb_T0_subl
] = CC_C
,
4197 [INDEX_op_setz_T0_subb
] = CC_Z
,
4198 [INDEX_op_setz_T0_subw
] = CC_Z
,
4199 [INDEX_op_setz_T0_subl
] = CC_Z
,
4201 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
4202 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
4203 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
4205 [INDEX_op_sets_T0_subb
] = CC_S
,
4206 [INDEX_op_sets_T0_subw
] = CC_S
,
4207 [INDEX_op_sets_T0_subl
] = CC_S
,
4209 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
4210 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
4211 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
4213 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
4214 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
4215 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
4217 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
4218 [INDEX_op_cmc
] = CC_C
,
4219 [INDEX_op_salc
] = CC_C
,
4221 [INDEX_op_rclb_T0_T1_cc
] = CC_C
,
4222 [INDEX_op_rclw_T0_T1_cc
] = CC_C
,
4223 [INDEX_op_rcll_T0_T1_cc
] = CC_C
,
4224 [INDEX_op_rcrb_T0_T1_cc
] = CC_C
,
4225 [INDEX_op_rcrw_T0_T1_cc
] = CC_C
,
4226 [INDEX_op_rcrl_T0_T1_cc
] = CC_C
,
4228 [INDEX_op_rclb_mem_T0_T1_cc
] = CC_C
,
4229 [INDEX_op_rclw_mem_T0_T1_cc
] = CC_C
,
4230 [INDEX_op_rcll_mem_T0_T1_cc
] = CC_C
,
4231 [INDEX_op_rcrb_mem_T0_T1_cc
] = CC_C
,
4232 [INDEX_op_rcrw_mem_T0_T1_cc
] = CC_C
,
4233 [INDEX_op_rcrl_mem_T0_T1_cc
] = CC_C
,
4236 /* flags written by an operation */
4237 static uint16_t opc_write_flags
[NB_OPS
] = {
4238 [INDEX_op_update2_cc
] = CC_OSZAPC
,
4239 [INDEX_op_update1_cc
] = CC_OSZAPC
,
4240 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
4241 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
4242 /* subtle: due to the incl/decl implementation, C is used */
4243 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
4244 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
4246 [INDEX_op_adcb_T0_T1_cc
] = CC_OSZAPC
,
4247 [INDEX_op_adcw_T0_T1_cc
] = CC_OSZAPC
,
4248 [INDEX_op_adcl_T0_T1_cc
] = CC_OSZAPC
,
4249 [INDEX_op_sbbb_T0_T1_cc
] = CC_OSZAPC
,
4250 [INDEX_op_sbbw_T0_T1_cc
] = CC_OSZAPC
,
4251 [INDEX_op_sbbl_T0_T1_cc
] = CC_OSZAPC
,
4253 [INDEX_op_adcb_mem_T0_T1_cc
] = CC_OSZAPC
,
4254 [INDEX_op_adcw_mem_T0_T1_cc
] = CC_OSZAPC
,
4255 [INDEX_op_adcl_mem_T0_T1_cc
] = CC_OSZAPC
,
4256 [INDEX_op_sbbb_mem_T0_T1_cc
] = CC_OSZAPC
,
4257 [INDEX_op_sbbw_mem_T0_T1_cc
] = CC_OSZAPC
,
4258 [INDEX_op_sbbl_mem_T0_T1_cc
] = CC_OSZAPC
,
4260 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
4261 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
4262 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
4263 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
4264 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
4265 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
4266 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
4267 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
4270 [INDEX_op_aam
] = CC_OSZAPC
,
4271 [INDEX_op_aad
] = CC_OSZAPC
,
4272 [INDEX_op_aas
] = CC_OSZAPC
,
4273 [INDEX_op_aaa
] = CC_OSZAPC
,
4274 [INDEX_op_das
] = CC_OSZAPC
,
4275 [INDEX_op_daa
] = CC_OSZAPC
,
4277 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
4278 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
4279 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
4280 [INDEX_op_movw_eflags_T0_io
] = CC_OSZAPC
,
4281 [INDEX_op_movl_eflags_T0_io
] = CC_OSZAPC
,
4282 [INDEX_op_movw_eflags_T0_cpl0
] = CC_OSZAPC
,
4283 [INDEX_op_movl_eflags_T0_cpl0
] = CC_OSZAPC
,
4284 [INDEX_op_clc
] = CC_C
,
4285 [INDEX_op_stc
] = CC_C
,
4286 [INDEX_op_cmc
] = CC_C
,
4288 [INDEX_op_rolb_T0_T1_cc
] = CC_O
| CC_C
,
4289 [INDEX_op_rolw_T0_T1_cc
] = CC_O
| CC_C
,
4290 [INDEX_op_roll_T0_T1_cc
] = CC_O
| CC_C
,
4291 [INDEX_op_rorb_T0_T1_cc
] = CC_O
| CC_C
,
4292 [INDEX_op_rorw_T0_T1_cc
] = CC_O
| CC_C
,
4293 [INDEX_op_rorl_T0_T1_cc
] = CC_O
| CC_C
,
4295 [INDEX_op_rclb_T0_T1_cc
] = CC_O
| CC_C
,
4296 [INDEX_op_rclw_T0_T1_cc
] = CC_O
| CC_C
,
4297 [INDEX_op_rcll_T0_T1_cc
] = CC_O
| CC_C
,
4298 [INDEX_op_rcrb_T0_T1_cc
] = CC_O
| CC_C
,
4299 [INDEX_op_rcrw_T0_T1_cc
] = CC_O
| CC_C
,
4300 [INDEX_op_rcrl_T0_T1_cc
] = CC_O
| CC_C
,
4302 [INDEX_op_shlb_T0_T1_cc
] = CC_OSZAPC
,
4303 [INDEX_op_shlw_T0_T1_cc
] = CC_OSZAPC
,
4304 [INDEX_op_shll_T0_T1_cc
] = CC_OSZAPC
,
4306 [INDEX_op_shrb_T0_T1_cc
] = CC_OSZAPC
,
4307 [INDEX_op_shrw_T0_T1_cc
] = CC_OSZAPC
,
4308 [INDEX_op_shrl_T0_T1_cc
] = CC_OSZAPC
,
4310 [INDEX_op_sarb_T0_T1_cc
] = CC_OSZAPC
,
4311 [INDEX_op_sarw_T0_T1_cc
] = CC_OSZAPC
,
4312 [INDEX_op_sarl_T0_T1_cc
] = CC_OSZAPC
,
4314 [INDEX_op_shldw_T0_T1_ECX_cc
] = CC_OSZAPC
,
4315 [INDEX_op_shldl_T0_T1_ECX_cc
] = CC_OSZAPC
,
4316 [INDEX_op_shldw_T0_T1_im_cc
] = CC_OSZAPC
,
4317 [INDEX_op_shldl_T0_T1_im_cc
] = CC_OSZAPC
,
4319 [INDEX_op_shrdw_T0_T1_ECX_cc
] = CC_OSZAPC
,
4320 [INDEX_op_shrdl_T0_T1_ECX_cc
] = CC_OSZAPC
,
4321 [INDEX_op_shrdw_T0_T1_im_cc
] = CC_OSZAPC
,
4322 [INDEX_op_shrdl_T0_T1_im_cc
] = CC_OSZAPC
,
4324 [INDEX_op_rolb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4325 [INDEX_op_rolw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4326 [INDEX_op_roll_mem_T0_T1_cc
] = CC_O
| CC_C
,
4327 [INDEX_op_rorb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4328 [INDEX_op_rorw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4329 [INDEX_op_rorl_mem_T0_T1_cc
] = CC_O
| CC_C
,
4331 [INDEX_op_rclb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4332 [INDEX_op_rclw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4333 [INDEX_op_rcll_mem_T0_T1_cc
] = CC_O
| CC_C
,
4334 [INDEX_op_rcrb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4335 [INDEX_op_rcrw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4336 [INDEX_op_rcrl_mem_T0_T1_cc
] = CC_O
| CC_C
,
4338 [INDEX_op_shlb_mem_T0_T1_cc
] = CC_OSZAPC
,
4339 [INDEX_op_shlw_mem_T0_T1_cc
] = CC_OSZAPC
,
4340 [INDEX_op_shll_mem_T0_T1_cc
] = CC_OSZAPC
,
4342 [INDEX_op_shrb_mem_T0_T1_cc
] = CC_OSZAPC
,
4343 [INDEX_op_shrw_mem_T0_T1_cc
] = CC_OSZAPC
,
4344 [INDEX_op_shrl_mem_T0_T1_cc
] = CC_OSZAPC
,
4346 [INDEX_op_sarb_mem_T0_T1_cc
] = CC_OSZAPC
,
4347 [INDEX_op_sarw_mem_T0_T1_cc
] = CC_OSZAPC
,
4348 [INDEX_op_sarl_mem_T0_T1_cc
] = CC_OSZAPC
,
4350 [INDEX_op_shldw_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4351 [INDEX_op_shldl_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4352 [INDEX_op_shldw_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4353 [INDEX_op_shldl_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4355 [INDEX_op_shrdw_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4356 [INDEX_op_shrdl_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4357 [INDEX_op_shrdw_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4358 [INDEX_op_shrdl_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4360 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
4361 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
4362 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
4363 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
4364 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
4365 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
4366 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
4367 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
4369 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
4370 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
4371 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
4372 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
4374 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
4375 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
4376 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
4378 [INDEX_op_cmpxchgb_mem_T0_T1_EAX_cc
] = CC_OSZAPC
,
4379 [INDEX_op_cmpxchgw_mem_T0_T1_EAX_cc
] = CC_OSZAPC
,
4380 [INDEX_op_cmpxchgl_mem_T0_T1_EAX_cc
] = CC_OSZAPC
,
4382 [INDEX_op_cmpxchg8b
] = CC_Z
,
4383 [INDEX_op_lar
] = CC_Z
,
4384 [INDEX_op_lsl
] = CC_Z
,
4385 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
4386 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
4389 /* simpler form of an operation if no flags need to be generated */
4390 static uint16_t opc_simpler
[NB_OPS
] = {
4391 [INDEX_op_update2_cc
] = INDEX_op_nop
,
4392 [INDEX_op_update1_cc
] = INDEX_op_nop
,
4393 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
4395 /* broken: CC_OP logic must be rewritten */
4396 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
4398 [INDEX_op_rolb_T0_T1_cc
] = INDEX_op_rolb_T0_T1
,
4399 [INDEX_op_rolw_T0_T1_cc
] = INDEX_op_rolw_T0_T1
,
4400 [INDEX_op_roll_T0_T1_cc
] = INDEX_op_roll_T0_T1
,
4402 [INDEX_op_rorb_T0_T1_cc
] = INDEX_op_rorb_T0_T1
,
4403 [INDEX_op_rorw_T0_T1_cc
] = INDEX_op_rorw_T0_T1
,
4404 [INDEX_op_rorl_T0_T1_cc
] = INDEX_op_rorl_T0_T1
,
4406 [INDEX_op_rolb_mem_T0_T1_cc
] = INDEX_op_rolb_mem_T0_T1
,
4407 [INDEX_op_rolw_mem_T0_T1_cc
] = INDEX_op_rolw_mem_T0_T1
,
4408 [INDEX_op_roll_mem_T0_T1_cc
] = INDEX_op_roll_mem_T0_T1
,
4410 [INDEX_op_rorb_mem_T0_T1_cc
] = INDEX_op_rorb_mem_T0_T1
,
4411 [INDEX_op_rorw_mem_T0_T1_cc
] = INDEX_op_rorw_mem_T0_T1
,
4412 [INDEX_op_rorl_mem_T0_T1_cc
] = INDEX_op_rorl_mem_T0_T1
,
4414 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
4415 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
4416 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
4418 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
4419 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
4420 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
4422 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
4423 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
4424 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
/* NOTE(review): the fragments below are a garbled extraction -- each
   original statement is split across lines and the original source line
   numbers (e.g. "4427") are fused into the text.  The function's opening
   brace, the declaration of 'i', the body of the 'if', and the closing
   braces are MISSING from this copy and must be restored from the
   pristine source before this can compile. */
/* One-time init: fill in the opc_simpler[] entries left 0 by its
   designated initializer -- presumably each op maps to itself so that
   "no simpler form" substitutions are identity; TODO confirm against
   upstream, since the assignment line is absent here. */
4427 void optimize_flags_init(void)
4430 /* put default values in arrays */
4431 for(i
= 0; i
< NB_OPS
; i
++) {
4432 if (opc_simpler
[i
] == 0)
/* NOTE(review): garbled extraction -- the fragments below are shredded
   pieces of the original statements with source line numbers fused in.
   Missing from this copy: the function braces, the declaration of
   'opc_ptr', the backwards step that decrements opc_ptr and loads
   'op = *opc_ptr', part of the comment that initializes the liveness
   discussion, and the loop's closing braces.  Restore from the pristine
   source before compiling. */
4437 /* CPU flags computation optimization: we move backward thru the
4438 generated code to see which flags are needed. The operation is
4439 modified if suitable */
4440 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
4443 int live_flags
, write_flags
, op
;
/* start one past the last op; the walk is backwards from the block end */
4445 opc_ptr
= opc_buf
+ opc_buf_len
;
4446 /* live_flags contains the flags needed by the next instructions
4447 in the code. At the end of the bloc, we consider that all the
4449 live_flags
= CC_OSZAPC
;
4450 while (opc_ptr
> opc_buf
) {
4452 /* if none of the flags written by the instruction is used,
4453 then we can try to find a simpler instruction */
4454 write_flags
= opc_write_flags
[op
];
/* dead flag store: swap in the flag-less variant from opc_simpler[] */
4455 if ((live_flags
& write_flags
) == 0) {
4456 *opc_ptr
= opc_simpler
[op
];
4458 /* compute the live flags before the instruction */
4459 live_flags
&= ~write_flags
;
4460 live_flags
|= opc_read_flags
[op
];
/* NOTE(review): garbled extraction -- the fragments below are shredded
   pieces of the original function with source line numbers fused in.
   Large portions are MISSING from this copy (the 'search_pc' parameter
   and local declarations such as pc_start/cs_base/flags/pc_ptr/j/lj,
   the mem_index selection branches, the main translation loop's
   structure, several 'break's/braces, and the log-enable conditionals
   around the fprintf blocks).  Restore from the pristine source before
   compiling; the comments added here only annotate what the visible
   fragments show. */
4464 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
4465 basic block 'tb'. If search_pc is TRUE, also generate PC
4466 information for each intermediate instruction. */
4467 static inline int gen_intermediate_code_internal(CPUState
*env
,
4468 TranslationBlock
*tb
,
4471 DisasContext dc1
, *dc
= &dc1
;
4473 uint16_t *gen_opc_end
;
4478 /* generate intermediate code */
4479 pc_start
= (uint8_t *)tb
->pc
;
4480 cs_base
= (uint8_t *)tb
->cs_base
;
/* populate the per-block DisasContext from CPU state and the tb's
   'flags' word (HF_*/VM/IOPL/TF shifts) */
4483 dc
->pe
= env
->cr
[0] & CR0_PE_MASK
;
4484 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
4485 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
4486 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
4488 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
4489 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
4490 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
4491 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
4492 dc
->singlestep_enabled
= env
->singlestep_enabled
;
4493 dc
->cc_op
= CC_OP_DYNAMIC
;
4494 dc
->cs_base
= cs_base
;
4496 dc
->popl_esp_hack
= 0;
4497 /* select memory access functions */
4499 if (flags
& HF_SOFTMMU_MASK
) {
/* direct block chaining is disabled under TF, hardware single-step,
   inhibited IRQs, and (without CONFIG_SOFTMMU) softmmu mode */
4505 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
||
4506 (flags
& HF_INHIBIT_IRQ_MASK
)
4507 #ifndef CONFIG_SOFTMMU
4508 || (flags
& HF_SOFTMMU_MASK
)
/* reset the micro-op and parameter output buffers */
4511 gen_opc_ptr
= gen_opc_buf
;
4512 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4513 gen_opparam_ptr
= gen_opparam_buf
;
4515 dc
->is_jmp
= DISAS_NEXT
;
/* emit a debug trap at any pc matching a registered breakpoint */
4520 if (env
->nb_breakpoints
> 0) {
4521 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4522 if (env
->breakpoints
[j
] == (unsigned long)pc_ptr
) {
4523 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
/* search_pc bookkeeping: record pc/cc_op per generated op index */
4529 j
= gen_opc_ptr
- gen_opc_buf
;
4533 gen_opc_instr_start
[lj
++] = 0;
4535 gen_opc_pc
[lj
] = (uint32_t)pc_ptr
;
4536 gen_opc_cc_op
[lj
] = dc
->cc_op
;
4537 gen_opc_instr_start
[lj
] = 1;
/* translate one guest instruction; returns the next guest pc */
4539 pc_ptr
= disas_insn(dc
, pc_ptr
);
4540 /* stop translation if indicated */
4543 /* if single step mode, we generate only one instruction and
4544 generate an exception */
4545 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
4546 the flag and abort the translation to give the irqs a
4547 change to be happen */
4548 if (dc
->tf
|| dc
->singlestep_enabled
||
4549 (flags
& HF_INHIBIT_IRQ_MASK
)) {
4550 gen_op_jmp_im(pc_ptr
- dc
->cs_base
);
4554 /* if too long translation, stop generation too */
4555 if (gen_opc_ptr
>= gen_opc_end
||
4556 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
4557 gen_op_jmp_im(pc_ptr
- dc
->cs_base
);
/* terminate the op stream */
4562 *gen_opc_ptr
= INDEX_op_end
;
4563 /* we don't forget to fill the last values */
4565 j
= gen_opc_ptr
- gen_opc_buf
;
4568 gen_opc_instr_start
[lj
++] = 0;
/* debug logging of the guest code and generated ops (log-enable
   conditionals missing from this copy) */
4573 fprintf(logfile
, "----------------\n");
4574 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4575 disas(logfile
, pc_start
, pc_ptr
- pc_start
, 0, !dc
->code32
);
4576 fprintf(logfile
, "\n");
4578 fprintf(logfile
, "OP:\n");
4579 dump_ops(gen_opc_buf
, gen_opparam_buf
);
4580 fprintf(logfile
, "\n");
4584 /* optimize flag computations */
4585 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
4589 fprintf(logfile
, "AFTER FLAGS OPT:\n");
4590 dump_ops(gen_opc_buf
, gen_opparam_buf
);
4591 fprintf(logfile
, "\n");
4595 tb
->size
= pc_ptr
- pc_start
;
4599 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
4601 return gen_intermediate_code_internal(env
, tb
, 0);
4604 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
4606 return gen_intermediate_code_internal(env
, tb
, 1);