4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* XXX: move that elsewhere */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;
/* instruction prefix flags, OR-ed together into the decoder state */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
43 typedef struct DisasContext
{
44 /* current insn context */
45 int override
; /* -1 if no override */
48 uint8_t *pc
; /* pc = eip + cs_base */
49 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
50 static state change (stop translation) */
51 /* current block context */
52 uint8_t *cs_base
; /* base of CS segment */
53 int pe
; /* protected mode */
54 int code32
; /* 32 bit code segment */
55 int ss32
; /* 32 bit stack segment */
56 int cc_op
; /* current CC operation */
57 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
58 int f_st
; /* currently unused */
59 int vm86
; /* vm86 mode */
62 int tf
; /* TF cpu flag */
63 int singlestep_enabled
; /* "hardware" single step enabled */
64 int jmp_opt
; /* use direct block chaining for direct jumps */
65 int mem_index
; /* select memory access functions */
66 struct TranslationBlock
*tb
;
67 int popl_esp_hack
; /* for correct popl with esp base handling */
70 static void gen_eob(DisasContext
*s
);
71 static void gen_jmp(DisasContext
*s
, unsigned int eip
);
73 /* i386 arith/logic operations */
93 OP_SHL1
, /* undocumented */
98 #define DEF(s, n, copy_size) INDEX_op_ ## s,
115 /* I386 int registers */
116 OR_EAX
, /* MUST be even numbered */
124 OR_TMP0
, /* temporary operand register */
126 OR_A0
, /* temporary register used when doing address evaluation */
127 OR_ZERO
, /* fixed zero register */
/* generated micro-op entry points, by number of 'long' arguments */
typedef void (GenOpFunc)(void);
typedef void (GenOpFunc1)(long);
typedef void (GenOpFunc2)(long, long);
typedef void (GenOpFunc3)(long, long, long);
136 static GenOpFunc
*gen_op_mov_reg_T0
[3][8] = {
169 static GenOpFunc
*gen_op_mov_reg_T1
[3][8] = {
202 static GenOpFunc
*gen_op_mov_reg_A0
[2][8] = {
225 static GenOpFunc
*gen_op_mov_TN_reg
[3][2][8] =
295 static GenOpFunc
*gen_op_movl_A0_reg
[8] = {
306 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][8] = {
318 gen_op_addl_A0_EAX_s1
,
319 gen_op_addl_A0_ECX_s1
,
320 gen_op_addl_A0_EDX_s1
,
321 gen_op_addl_A0_EBX_s1
,
322 gen_op_addl_A0_ESP_s1
,
323 gen_op_addl_A0_EBP_s1
,
324 gen_op_addl_A0_ESI_s1
,
325 gen_op_addl_A0_EDI_s1
,
328 gen_op_addl_A0_EAX_s2
,
329 gen_op_addl_A0_ECX_s2
,
330 gen_op_addl_A0_EDX_s2
,
331 gen_op_addl_A0_EBX_s2
,
332 gen_op_addl_A0_ESP_s2
,
333 gen_op_addl_A0_EBP_s2
,
334 gen_op_addl_A0_ESI_s2
,
335 gen_op_addl_A0_EDI_s2
,
338 gen_op_addl_A0_EAX_s3
,
339 gen_op_addl_A0_ECX_s3
,
340 gen_op_addl_A0_EDX_s3
,
341 gen_op_addl_A0_EBX_s3
,
342 gen_op_addl_A0_ESP_s3
,
343 gen_op_addl_A0_EBP_s3
,
344 gen_op_addl_A0_ESI_s3
,
345 gen_op_addl_A0_EDI_s3
,
349 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[2][8] = {
351 gen_op_cmovw_EAX_T1_T0
,
352 gen_op_cmovw_ECX_T1_T0
,
353 gen_op_cmovw_EDX_T1_T0
,
354 gen_op_cmovw_EBX_T1_T0
,
355 gen_op_cmovw_ESP_T1_T0
,
356 gen_op_cmovw_EBP_T1_T0
,
357 gen_op_cmovw_ESI_T1_T0
,
358 gen_op_cmovw_EDI_T1_T0
,
361 gen_op_cmovl_EAX_T1_T0
,
362 gen_op_cmovl_ECX_T1_T0
,
363 gen_op_cmovl_EDX_T1_T0
,
364 gen_op_cmovl_EBX_T1_T0
,
365 gen_op_cmovl_ESP_T1_T0
,
366 gen_op_cmovl_EBP_T1_T0
,
367 gen_op_cmovl_ESI_T1_T0
,
368 gen_op_cmovl_EDI_T1_T0
,
372 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
383 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[3][2] = {
385 gen_op_adcb_T0_T1_cc
,
386 gen_op_sbbb_T0_T1_cc
,
389 gen_op_adcw_T0_T1_cc
,
390 gen_op_sbbw_T0_T1_cc
,
393 gen_op_adcl_T0_T1_cc
,
394 gen_op_sbbl_T0_T1_cc
,
398 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3][2] = {
400 gen_op_adcb_mem_T0_T1_cc
,
401 gen_op_sbbb_mem_T0_T1_cc
,
404 gen_op_adcw_mem_T0_T1_cc
,
405 gen_op_sbbw_mem_T0_T1_cc
,
408 gen_op_adcl_mem_T0_T1_cc
,
409 gen_op_sbbl_mem_T0_T1_cc
,
413 static const int cc_op_arithb
[8] = {
424 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[3] = {
425 gen_op_cmpxchgb_T0_T1_EAX_cc
,
426 gen_op_cmpxchgw_T0_T1_EAX_cc
,
427 gen_op_cmpxchgl_T0_T1_EAX_cc
,
430 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3] = {
431 gen_op_cmpxchgb_mem_T0_T1_EAX_cc
,
432 gen_op_cmpxchgw_mem_T0_T1_EAX_cc
,
433 gen_op_cmpxchgl_mem_T0_T1_EAX_cc
,
436 static GenOpFunc
*gen_op_shift_T0_T1_cc
[3][8] = {
438 gen_op_rolb_T0_T1_cc
,
439 gen_op_rorb_T0_T1_cc
,
440 gen_op_rclb_T0_T1_cc
,
441 gen_op_rcrb_T0_T1_cc
,
442 gen_op_shlb_T0_T1_cc
,
443 gen_op_shrb_T0_T1_cc
,
444 gen_op_shlb_T0_T1_cc
,
445 gen_op_sarb_T0_T1_cc
,
448 gen_op_rolw_T0_T1_cc
,
449 gen_op_rorw_T0_T1_cc
,
450 gen_op_rclw_T0_T1_cc
,
451 gen_op_rcrw_T0_T1_cc
,
452 gen_op_shlw_T0_T1_cc
,
453 gen_op_shrw_T0_T1_cc
,
454 gen_op_shlw_T0_T1_cc
,
455 gen_op_sarw_T0_T1_cc
,
458 gen_op_roll_T0_T1_cc
,
459 gen_op_rorl_T0_T1_cc
,
460 gen_op_rcll_T0_T1_cc
,
461 gen_op_rcrl_T0_T1_cc
,
462 gen_op_shll_T0_T1_cc
,
463 gen_op_shrl_T0_T1_cc
,
464 gen_op_shll_T0_T1_cc
,
465 gen_op_sarl_T0_T1_cc
,
469 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3][8] = {
471 gen_op_rolb_mem_T0_T1_cc
,
472 gen_op_rorb_mem_T0_T1_cc
,
473 gen_op_rclb_mem_T0_T1_cc
,
474 gen_op_rcrb_mem_T0_T1_cc
,
475 gen_op_shlb_mem_T0_T1_cc
,
476 gen_op_shrb_mem_T0_T1_cc
,
477 gen_op_shlb_mem_T0_T1_cc
,
478 gen_op_sarb_mem_T0_T1_cc
,
481 gen_op_rolw_mem_T0_T1_cc
,
482 gen_op_rorw_mem_T0_T1_cc
,
483 gen_op_rclw_mem_T0_T1_cc
,
484 gen_op_rcrw_mem_T0_T1_cc
,
485 gen_op_shlw_mem_T0_T1_cc
,
486 gen_op_shrw_mem_T0_T1_cc
,
487 gen_op_shlw_mem_T0_T1_cc
,
488 gen_op_sarw_mem_T0_T1_cc
,
491 gen_op_roll_mem_T0_T1_cc
,
492 gen_op_rorl_mem_T0_T1_cc
,
493 gen_op_rcll_mem_T0_T1_cc
,
494 gen_op_rcrl_mem_T0_T1_cc
,
495 gen_op_shll_mem_T0_T1_cc
,
496 gen_op_shrl_mem_T0_T1_cc
,
497 gen_op_shll_mem_T0_T1_cc
,
498 gen_op_sarl_mem_T0_T1_cc
,
502 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[2][2] = {
504 gen_op_shldw_T0_T1_im_cc
,
505 gen_op_shrdw_T0_T1_im_cc
,
508 gen_op_shldl_T0_T1_im_cc
,
509 gen_op_shrdl_T0_T1_im_cc
,
513 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[2][2] = {
515 gen_op_shldw_T0_T1_ECX_cc
,
516 gen_op_shrdw_T0_T1_ECX_cc
,
519 gen_op_shldl_T0_T1_ECX_cc
,
520 gen_op_shrdl_T0_T1_ECX_cc
,
524 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[2][2] = {
526 gen_op_shldw_mem_T0_T1_im_cc
,
527 gen_op_shrdw_mem_T0_T1_im_cc
,
530 gen_op_shldl_mem_T0_T1_im_cc
,
531 gen_op_shrdl_mem_T0_T1_im_cc
,
535 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[2][2] = {
537 gen_op_shldw_mem_T0_T1_ECX_cc
,
538 gen_op_shrdw_mem_T0_T1_ECX_cc
,
541 gen_op_shldl_mem_T0_T1_ECX_cc
,
542 gen_op_shrdl_mem_T0_T1_ECX_cc
,
546 static GenOpFunc
*gen_op_btx_T0_T1_cc
[2][4] = {
549 gen_op_btsw_T0_T1_cc
,
550 gen_op_btrw_T0_T1_cc
,
551 gen_op_btcw_T0_T1_cc
,
555 gen_op_btsl_T0_T1_cc
,
556 gen_op_btrl_T0_T1_cc
,
557 gen_op_btcl_T0_T1_cc
,
561 static GenOpFunc
*gen_op_bsx_T0_cc
[2][2] = {
572 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 3] = {
573 gen_op_ldsb_raw_T0_A0
,
574 gen_op_ldsw_raw_T0_A0
,
576 #ifndef CONFIG_USER_ONLY
577 gen_op_ldsb_kernel_T0_A0
,
578 gen_op_ldsw_kernel_T0_A0
,
581 gen_op_ldsb_user_T0_A0
,
582 gen_op_ldsw_user_T0_A0
,
587 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 3] = {
588 gen_op_ldub_raw_T0_A0
,
589 gen_op_lduw_raw_T0_A0
,
592 #ifndef CONFIG_USER_ONLY
593 gen_op_ldub_kernel_T0_A0
,
594 gen_op_lduw_kernel_T0_A0
,
597 gen_op_ldub_user_T0_A0
,
598 gen_op_lduw_user_T0_A0
,
603 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
604 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 3] = {
605 gen_op_ldub_raw_T0_A0
,
606 gen_op_lduw_raw_T0_A0
,
607 gen_op_ldl_raw_T0_A0
,
609 #ifndef CONFIG_USER_ONLY
610 gen_op_ldub_kernel_T0_A0
,
611 gen_op_lduw_kernel_T0_A0
,
612 gen_op_ldl_kernel_T0_A0
,
614 gen_op_ldub_user_T0_A0
,
615 gen_op_lduw_user_T0_A0
,
616 gen_op_ldl_user_T0_A0
,
620 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 3] = {
621 gen_op_ldub_raw_T1_A0
,
622 gen_op_lduw_raw_T1_A0
,
623 gen_op_ldl_raw_T1_A0
,
625 #ifndef CONFIG_USER_ONLY
626 gen_op_ldub_kernel_T1_A0
,
627 gen_op_lduw_kernel_T1_A0
,
628 gen_op_ldl_kernel_T1_A0
,
630 gen_op_ldub_user_T1_A0
,
631 gen_op_lduw_user_T1_A0
,
632 gen_op_ldl_user_T1_A0
,
636 static GenOpFunc
*gen_op_st_T0_A0
[3 * 3] = {
637 gen_op_stb_raw_T0_A0
,
638 gen_op_stw_raw_T0_A0
,
639 gen_op_stl_raw_T0_A0
,
641 #ifndef CONFIG_USER_ONLY
642 gen_op_stb_kernel_T0_A0
,
643 gen_op_stw_kernel_T0_A0
,
644 gen_op_stl_kernel_T0_A0
,
646 gen_op_stb_user_T0_A0
,
647 gen_op_stw_user_T0_A0
,
648 gen_op_stl_user_T0_A0
,
652 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
656 override
= s
->override
;
659 if (s
->addseg
&& override
< 0)
662 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
663 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
665 gen_op_movl_A0_reg
[R_ESI
]();
668 /* 16 address, always override */
671 gen_op_movl_A0_reg
[R_ESI
]();
672 gen_op_andl_A0_ffff();
673 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
677 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
681 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
682 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
684 gen_op_movl_A0_reg
[R_EDI
]();
687 gen_op_movl_A0_reg
[R_EDI
]();
688 gen_op_andl_A0_ffff();
689 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
693 static GenOpFunc
*gen_op_movl_T0_Dshift
[3] = {
694 gen_op_movl_T0_Dshiftb
,
695 gen_op_movl_T0_Dshiftw
,
696 gen_op_movl_T0_Dshiftl
,
699 static GenOpFunc2
*gen_op_jz_ecx
[2] = {
704 static GenOpFunc1
*gen_op_jz_ecx_im
[2] = {
709 static GenOpFunc
*gen_op_dec_ECX
[2] = {
714 static GenOpFunc1
*gen_op_string_jnz_sub
[2][3] = {
716 gen_op_string_jnz_subb
,
717 gen_op_string_jnz_subw
,
718 gen_op_string_jnz_subl
,
721 gen_op_string_jz_subb
,
722 gen_op_string_jz_subw
,
723 gen_op_string_jz_subl
,
727 static GenOpFunc1
*gen_op_string_jnz_sub_im
[2][3] = {
729 gen_op_string_jnz_subb_im
,
730 gen_op_string_jnz_subw_im
,
731 gen_op_string_jnz_subl_im
,
734 gen_op_string_jz_subb_im
,
735 gen_op_string_jz_subw_im
,
736 gen_op_string_jz_subl_im
,
740 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
746 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
752 static inline void gen_movs(DisasContext
*s
, int ot
)
754 gen_string_movl_A0_ESI(s
);
755 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
756 gen_string_movl_A0_EDI(s
);
757 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
758 gen_op_movl_T0_Dshift
[ot
]();
760 gen_op_addl_ESI_T0();
761 gen_op_addl_EDI_T0();
763 gen_op_addw_ESI_T0();
764 gen_op_addw_EDI_T0();
768 static inline void gen_update_cc_op(DisasContext
*s
)
770 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
771 gen_op_set_cc_op(s
->cc_op
);
772 s
->cc_op
= CC_OP_DYNAMIC
;
776 static inline void gen_jz_ecx_string(DisasContext
*s
, unsigned int next_eip
)
779 gen_op_jz_ecx
[s
->aflag
]((long)s
->tb
, next_eip
);
781 /* XXX: does not work with gdbstub "ice" single step - not a
783 gen_op_jz_ecx_im
[s
->aflag
](next_eip
);
787 static inline void gen_stos(DisasContext
*s
, int ot
)
789 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
790 gen_string_movl_A0_EDI(s
);
791 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
792 gen_op_movl_T0_Dshift
[ot
]();
794 gen_op_addl_EDI_T0();
796 gen_op_addw_EDI_T0();
800 static inline void gen_lods(DisasContext
*s
, int ot
)
802 gen_string_movl_A0_ESI(s
);
803 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
804 gen_op_mov_reg_T0
[ot
][R_EAX
]();
805 gen_op_movl_T0_Dshift
[ot
]();
807 gen_op_addl_ESI_T0();
809 gen_op_addw_ESI_T0();
813 static inline void gen_scas(DisasContext
*s
, int ot
)
815 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
816 gen_string_movl_A0_EDI(s
);
817 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
818 gen_op_cmpl_T0_T1_cc();
819 gen_op_movl_T0_Dshift
[ot
]();
821 gen_op_addl_EDI_T0();
823 gen_op_addw_EDI_T0();
827 static inline void gen_cmps(DisasContext
*s
, int ot
)
829 gen_string_movl_A0_ESI(s
);
830 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
831 gen_string_movl_A0_EDI(s
);
832 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
833 gen_op_cmpl_T0_T1_cc();
834 gen_op_movl_T0_Dshift
[ot
]();
836 gen_op_addl_ESI_T0();
837 gen_op_addl_EDI_T0();
839 gen_op_addw_ESI_T0();
840 gen_op_addw_EDI_T0();
844 static inline void gen_ins(DisasContext
*s
, int ot
)
846 gen_op_in_DX_T0
[ot
]();
847 gen_string_movl_A0_EDI(s
);
848 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
849 gen_op_movl_T0_Dshift
[ot
]();
851 gen_op_addl_EDI_T0();
853 gen_op_addw_EDI_T0();
857 static inline void gen_outs(DisasContext
*s
, int ot
)
859 gen_string_movl_A0_ESI(s
);
860 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
861 gen_op_out_DX_T0
[ot
]();
862 gen_op_movl_T0_Dshift
[ot
]();
864 gen_op_addl_ESI_T0();
866 gen_op_addw_ESI_T0();
870 /* same method as Valgrind : we generate jumps to current or next
872 #define GEN_REPZ(op) \
873 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
874 unsigned int cur_eip, unsigned int next_eip) \
876 gen_update_cc_op(s); \
877 gen_jz_ecx_string(s, next_eip); \
879 gen_op_dec_ECX[s->aflag](); \
880 /* a loop would cause two single step exceptions if ECX = 1 \
881 before rep string_insn */ \
883 gen_op_jz_ecx_im[s->aflag](next_eip); \
884 gen_jmp(s, cur_eip); \
887 #define GEN_REPZ2(op) \
888 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
889 unsigned int cur_eip, \
890 unsigned int next_eip, \
893 gen_update_cc_op(s); \
894 gen_jz_ecx_string(s, next_eip); \
896 gen_op_dec_ECX[s->aflag](); \
897 gen_op_set_cc_op(CC_OP_SUBB + ot); \
899 gen_op_string_jnz_sub_im[nz][ot](next_eip); \
901 gen_op_string_jnz_sub[nz][ot]((long)s->tb); \
903 gen_op_jz_ecx_im[s->aflag](next_eip); \
904 gen_jmp(s, cur_eip); \
915 static GenOpFunc
*gen_op_in
[3] = {
921 static GenOpFunc
*gen_op_out
[3] = {
938 static GenOpFunc3
*gen_jcc_sub
[3][8] = {
970 static GenOpFunc2
*gen_op_loop
[2][4] = {
985 static GenOpFunc
*gen_setcc_slow
[8] = {
996 static GenOpFunc
*gen_setcc_sub
[3][8] = {
1000 gen_op_setz_T0_subb
,
1001 gen_op_setbe_T0_subb
,
1002 gen_op_sets_T0_subb
,
1004 gen_op_setl_T0_subb
,
1005 gen_op_setle_T0_subb
,
1009 gen_op_setb_T0_subw
,
1010 gen_op_setz_T0_subw
,
1011 gen_op_setbe_T0_subw
,
1012 gen_op_sets_T0_subw
,
1014 gen_op_setl_T0_subw
,
1015 gen_op_setle_T0_subw
,
1019 gen_op_setb_T0_subl
,
1020 gen_op_setz_T0_subl
,
1021 gen_op_setbe_T0_subl
,
1022 gen_op_sets_T0_subl
,
1024 gen_op_setl_T0_subl
,
1025 gen_op_setle_T0_subl
,
1029 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1030 gen_op_fadd_ST0_FT0
,
1031 gen_op_fmul_ST0_FT0
,
1032 gen_op_fcom_ST0_FT0
,
1033 gen_op_fcom_ST0_FT0
,
1034 gen_op_fsub_ST0_FT0
,
1035 gen_op_fsubr_ST0_FT0
,
1036 gen_op_fdiv_ST0_FT0
,
1037 gen_op_fdivr_ST0_FT0
,
1040 /* NOTE the exception in "r" op ordering */
1041 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1042 gen_op_fadd_STN_ST0
,
1043 gen_op_fmul_STN_ST0
,
1046 gen_op_fsubr_STN_ST0
,
1047 gen_op_fsub_STN_ST0
,
1048 gen_op_fdivr_STN_ST0
,
1049 gen_op_fdiv_STN_ST0
,
1052 /* if d == OR_TMP0, it means memory operand (address in A0) */
1053 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1055 GenOpFunc
*gen_update_cc
;
1058 gen_op_mov_TN_reg
[ot
][0][d
]();
1060 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1065 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1066 gen_op_set_cc_op(s1
->cc_op
);
1068 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1069 gen_op_mov_reg_T0
[ot
][d
]();
1071 gen_op_arithc_mem_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1073 s1
->cc_op
= CC_OP_DYNAMIC
;
1076 gen_op_addl_T0_T1();
1077 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1078 gen_update_cc
= gen_op_update2_cc
;
1081 gen_op_subl_T0_T1();
1082 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1083 gen_update_cc
= gen_op_update2_cc
;
1089 gen_op_arith_T0_T1_cc
[op
]();
1090 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1091 gen_update_cc
= gen_op_update1_cc
;
1094 gen_op_cmpl_T0_T1_cc();
1095 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1096 gen_update_cc
= NULL
;
1099 if (op
!= OP_CMPL
) {
1101 gen_op_mov_reg_T0
[ot
][d
]();
1103 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1105 /* the flags update must happen after the memory write (precise
1106 exception support) */
1112 /* if d == OR_TMP0, it means memory operand (address in A0) */
1113 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1116 gen_op_mov_TN_reg
[ot
][0][d
]();
1118 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1119 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1120 gen_op_set_cc_op(s1
->cc_op
);
1123 s1
->cc_op
= CC_OP_INCB
+ ot
;
1126 s1
->cc_op
= CC_OP_DECB
+ ot
;
1129 gen_op_mov_reg_T0
[ot
][d
]();
1131 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1132 gen_op_update_inc_cc();
1135 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1138 gen_op_mov_TN_reg
[ot
][0][d
]();
1140 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1142 gen_op_mov_TN_reg
[ot
][1][s
]();
1143 /* for zero counts, flags are not updated, so must do it dynamically */
1144 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1145 gen_op_set_cc_op(s1
->cc_op
);
1148 gen_op_shift_T0_T1_cc
[ot
][op
]();
1150 gen_op_shift_mem_T0_T1_cc
[ot
][op
]();
1152 gen_op_mov_reg_T0
[ot
][d
]();
1153 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1156 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1158 /* currently not optimized */
1159 gen_op_movl_T1_im(c
);
1160 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1163 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1170 int mod
, rm
, code
, override
, must_add_seg
;
1172 override
= s
->override
;
1173 must_add_seg
= s
->addseg
;
1176 mod
= (modrm
>> 6) & 3;
1188 code
= ldub_code(s
->pc
++);
1189 scale
= (code
>> 6) & 3;
1190 index
= (code
>> 3) & 7;
1198 disp
= ldl_code(s
->pc
);
1205 disp
= (int8_t)ldub_code(s
->pc
++);
1209 disp
= ldl_code(s
->pc
);
1215 /* for correct popl handling with esp */
1216 if (base
== 4 && s
->popl_esp_hack
)
1217 disp
+= s
->popl_esp_hack
;
1218 gen_op_movl_A0_reg
[base
]();
1220 gen_op_addl_A0_im(disp
);
1222 gen_op_movl_A0_im(disp
);
1224 /* XXX: index == 4 is always invalid */
1225 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1226 gen_op_addl_A0_reg_sN
[scale
][index
]();
1230 if (base
== R_EBP
|| base
== R_ESP
)
1235 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1241 disp
= lduw_code(s
->pc
);
1243 gen_op_movl_A0_im(disp
);
1244 rm
= 0; /* avoid SS override */
1251 disp
= (int8_t)ldub_code(s
->pc
++);
1255 disp
= lduw_code(s
->pc
);
1261 gen_op_movl_A0_reg
[R_EBX
]();
1262 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1265 gen_op_movl_A0_reg
[R_EBX
]();
1266 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1269 gen_op_movl_A0_reg
[R_EBP
]();
1270 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1273 gen_op_movl_A0_reg
[R_EBP
]();
1274 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1277 gen_op_movl_A0_reg
[R_ESI
]();
1280 gen_op_movl_A0_reg
[R_EDI
]();
1283 gen_op_movl_A0_reg
[R_EBP
]();
1287 gen_op_movl_A0_reg
[R_EBX
]();
1291 gen_op_addl_A0_im(disp
);
1292 gen_op_andl_A0_ffff();
1296 if (rm
== 2 || rm
== 3 || rm
== 6)
1301 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1311 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1313 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1315 int mod
, rm
, opreg
, disp
;
1317 mod
= (modrm
>> 6) & 3;
1322 gen_op_mov_TN_reg
[ot
][0][reg
]();
1323 gen_op_mov_reg_T0
[ot
][rm
]();
1325 gen_op_mov_TN_reg
[ot
][0][rm
]();
1327 gen_op_mov_reg_T0
[ot
][reg
]();
1330 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1333 gen_op_mov_TN_reg
[ot
][0][reg
]();
1334 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1336 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1338 gen_op_mov_reg_T0
[ot
][reg
]();
1343 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1349 ret
= ldub_code(s
->pc
);
1353 ret
= lduw_code(s
->pc
);
1358 ret
= ldl_code(s
->pc
);
1365 static inline void gen_jcc(DisasContext
*s
, int b
, int val
, int next_eip
)
1367 TranslationBlock
*tb
;
1372 jcc_op
= (b
>> 1) & 7;
1376 /* we optimize the cmp/jcc case */
1380 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1383 /* some jumps are easy to compute */
1410 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1413 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1425 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1426 gen_op_set_cc_op(s
->cc_op
);
1429 gen_setcc_slow
[jcc_op
]();
1435 func((long)tb
, val
, next_eip
);
1437 func((long)tb
, next_eip
, val
);
1441 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1442 gen_op_set_cc_op(s
->cc_op
);
1443 s
->cc_op
= CC_OP_DYNAMIC
;
1445 gen_setcc_slow
[jcc_op
]();
1447 gen_op_jcc_im(val
, next_eip
);
1449 gen_op_jcc_im(next_eip
, val
);
1455 static void gen_setcc(DisasContext
*s
, int b
)
1461 jcc_op
= (b
>> 1) & 7;
1463 /* we optimize the cmp/jcc case */
1467 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1472 /* some jumps are easy to compute */
1490 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1493 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 3][jcc_op
];
1501 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1502 gen_op_set_cc_op(s
->cc_op
);
1503 func
= gen_setcc_slow
[jcc_op
];
1512 /* move T0 to seg_reg and compute if the CPU state may change. Never
1513 call this function with seg_reg == R_CS */
1514 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, unsigned int cur_eip
)
1516 if (s
->pe
&& !s
->vm86
)
1517 gen_op_movl_seg_T0(seg_reg
, cur_eip
);
1519 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1520 /* abort translation because the register may have a non zero base
1521 or because ss32 may change. For R_SS, translation must always
1522 stop as a special handling must be done to disable hardware
1523 interrupts for the next instruction */
1524 if (seg_reg
== R_SS
|| (!s
->addseg
&& seg_reg
< R_FS
))
1528 /* generate a push. It depends on ss32, addseg and dflag */
1529 static void gen_push_T0(DisasContext
*s
)
1539 gen_op_pushl_ss32_T0();
1541 gen_op_pushw_ss32_T0();
1545 gen_op_pushl_ss16_T0();
1547 gen_op_pushw_ss16_T0();
1551 /* two step pop is necessary for precise exceptions */
1552 static void gen_pop_T0(DisasContext
*s
)
1562 gen_op_popl_ss32_T0();
1564 gen_op_popw_ss32_T0();
1568 gen_op_popl_ss16_T0();
1570 gen_op_popw_ss16_T0();
1574 static inline void gen_stack_update(DisasContext
*s
, int addend
)
1578 gen_op_addl_ESP_2();
1579 else if (addend
== 4)
1580 gen_op_addl_ESP_4();
1582 gen_op_addl_ESP_im(addend
);
1585 gen_op_addw_ESP_2();
1586 else if (addend
== 4)
1587 gen_op_addw_ESP_4();
1589 gen_op_addw_ESP_im(addend
);
1593 static void gen_pop_update(DisasContext
*s
)
1595 gen_stack_update(s
, 2 << s
->dflag
);
1598 static void gen_stack_A0(DisasContext
*s
)
1600 gen_op_movl_A0_ESP();
1602 gen_op_andl_A0_ffff();
1603 gen_op_movl_T1_A0();
1605 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1608 /* NOTE: wrap around in 16 bit not fully handled */
1609 static void gen_pusha(DisasContext
*s
)
1612 gen_op_movl_A0_ESP();
1613 gen_op_addl_A0_im(-16 << s
->dflag
);
1615 gen_op_andl_A0_ffff();
1616 gen_op_movl_T1_A0();
1618 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1619 for(i
= 0;i
< 8; i
++) {
1620 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
1621 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
1622 gen_op_addl_A0_im(2 << s
->dflag
);
1624 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
1627 /* NOTE: wrap around in 16 bit not fully handled */
1628 static void gen_popa(DisasContext
*s
)
1631 gen_op_movl_A0_ESP();
1633 gen_op_andl_A0_ffff();
1634 gen_op_movl_T1_A0();
1635 gen_op_addl_T1_im(16 << s
->dflag
);
1637 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1638 for(i
= 0;i
< 8; i
++) {
1639 /* ESP is not reloaded */
1641 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
1642 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
1644 gen_op_addl_A0_im(2 << s
->dflag
);
1646 gen_op_mov_reg_T1
[OT_WORD
+ s
->dflag
][R_ESP
]();
1649 /* NOTE: wrap around in 16 bit not fully handled */
1650 /* XXX: check this */
1651 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
1653 int ot
, level1
, addend
, opsize
;
1655 ot
= s
->dflag
+ OT_WORD
;
1658 opsize
= 2 << s
->dflag
;
1660 gen_op_movl_A0_ESP();
1661 gen_op_addl_A0_im(-opsize
);
1663 gen_op_andl_A0_ffff();
1664 gen_op_movl_T1_A0();
1666 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
1668 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
1669 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1672 gen_op_addl_A0_im(-opsize
);
1673 gen_op_addl_T0_im(-opsize
);
1674 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1676 gen_op_addl_A0_im(-opsize
);
1677 /* XXX: add st_T1_A0 ? */
1678 gen_op_movl_T0_T1();
1679 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1681 gen_op_mov_reg_T1
[ot
][R_EBP
]();
1682 addend
= -esp_addend
;
1684 addend
-= opsize
* (level1
+ 1);
1685 gen_op_addl_T1_im(addend
);
1686 gen_op_mov_reg_T1
[ot
][R_ESP
]();
1689 static void gen_exception(DisasContext
*s
, int trapno
, unsigned int cur_eip
)
1691 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1692 gen_op_set_cc_op(s
->cc_op
);
1693 gen_op_jmp_im(cur_eip
);
1694 gen_op_raise_exception(trapno
);
1698 /* an interrupt is different from an exception because of the
1699 priviledge checks */
1700 static void gen_interrupt(DisasContext
*s
, int intno
,
1701 unsigned int cur_eip
, unsigned int next_eip
)
1703 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1704 gen_op_set_cc_op(s
->cc_op
);
1705 gen_op_jmp_im(cur_eip
);
1706 gen_op_raise_interrupt(intno
, next_eip
);
1710 static void gen_debug(DisasContext
*s
, unsigned int cur_eip
)
1712 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1713 gen_op_set_cc_op(s
->cc_op
);
1714 gen_op_jmp_im(cur_eip
);
1719 /* generate a generic end of block. Trace exception is also generated
1721 static void gen_eob(DisasContext
*s
)
1723 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1724 gen_op_set_cc_op(s
->cc_op
);
1725 if (s
->singlestep_enabled
) {
1728 gen_op_raise_exception(EXCP01_SSTP
);
1736 /* generate a jump to eip. No segment change must happen before as a
1737 direct call to the next block may occur */
1738 static void gen_jmp(DisasContext
*s
, unsigned int eip
)
1740 TranslationBlock
*tb
= s
->tb
;
1743 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1744 gen_op_set_cc_op(s
->cc_op
);
1745 gen_op_jmp((long)tb
, eip
);
1753 /* convert one instruction. s->is_jmp is set if the translation must
1754 be stopped. Return the next pc value */
1755 static uint8_t *disas_insn(DisasContext
*s
, uint8_t *pc_start
)
1757 int b
, prefixes
, aflag
, dflag
;
1759 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
1760 unsigned int next_eip
;
1768 b
= ldub_code(s
->pc
);
1770 /* check prefixes */
1773 prefixes
|= PREFIX_REPZ
;
1776 prefixes
|= PREFIX_REPNZ
;
1779 prefixes
|= PREFIX_LOCK
;
1800 prefixes
|= PREFIX_DATA
;
1803 prefixes
|= PREFIX_ADR
;
1807 if (prefixes
& PREFIX_DATA
)
1809 if (prefixes
& PREFIX_ADR
)
1812 s
->prefix
= prefixes
;
1816 /* lock generation */
1817 if (prefixes
& PREFIX_LOCK
)
1820 /* now check op code */
1824 /**************************/
1825 /* extended op code */
1826 b
= ldub_code(s
->pc
++) | 0x100;
1829 /**************************/
1847 ot
= dflag
? OT_LONG
: OT_WORD
;
1850 case 0: /* OP Ev, Gv */
1851 modrm
= ldub_code(s
->pc
++);
1852 reg
= ((modrm
>> 3) & 7);
1853 mod
= (modrm
>> 6) & 3;
1856 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1858 } else if (op
== OP_XORL
&& rm
== reg
) {
1860 /* xor reg, reg optimisation */
1862 s
->cc_op
= CC_OP_LOGICB
+ ot
;
1863 gen_op_mov_reg_T0
[ot
][reg
]();
1864 gen_op_update1_cc();
1869 gen_op_mov_TN_reg
[ot
][1][reg
]();
1870 gen_op(s
, op
, ot
, opreg
);
1872 case 1: /* OP Gv, Ev */
1873 modrm
= ldub_code(s
->pc
++);
1874 mod
= (modrm
>> 6) & 3;
1875 reg
= ((modrm
>> 3) & 7);
1878 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1879 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1880 } else if (op
== OP_XORL
&& rm
== reg
) {
1883 gen_op_mov_TN_reg
[ot
][1][rm
]();
1885 gen_op(s
, op
, ot
, reg
);
1887 case 2: /* OP A, Iv */
1888 val
= insn_get(s
, ot
);
1889 gen_op_movl_T1_im(val
);
1890 gen_op(s
, op
, ot
, OR_EAX
);
1896 case 0x80: /* GRP1 */
1905 ot
= dflag
? OT_LONG
: OT_WORD
;
1907 modrm
= ldub_code(s
->pc
++);
1908 mod
= (modrm
>> 6) & 3;
1910 op
= (modrm
>> 3) & 7;
1913 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1916 opreg
= rm
+ OR_EAX
;
1923 val
= insn_get(s
, ot
);
1926 val
= (int8_t)insn_get(s
, OT_BYTE
);
1929 gen_op_movl_T1_im(val
);
1930 gen_op(s
, op
, ot
, opreg
);
1934 /**************************/
1935 /* inc, dec, and other misc arith */
1936 case 0x40 ... 0x47: /* inc Gv */
1937 ot
= dflag
? OT_LONG
: OT_WORD
;
1938 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
1940 case 0x48 ... 0x4f: /* dec Gv */
1941 ot
= dflag
? OT_LONG
: OT_WORD
;
1942 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
1944 case 0xf6: /* GRP3 */
1949 ot
= dflag
? OT_LONG
: OT_WORD
;
1951 modrm
= ldub_code(s
->pc
++);
1952 mod
= (modrm
>> 6) & 3;
1954 op
= (modrm
>> 3) & 7;
1956 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
1957 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1959 gen_op_mov_TN_reg
[ot
][0][rm
]();
1964 val
= insn_get(s
, ot
);
1965 gen_op_movl_T1_im(val
);
1966 gen_op_testl_T0_T1_cc();
1967 s
->cc_op
= CC_OP_LOGICB
+ ot
;
1972 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1974 gen_op_mov_reg_T0
[ot
][rm
]();
1980 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1982 gen_op_mov_reg_T0
[ot
][rm
]();
1984 gen_op_update_neg_cc();
1985 s
->cc_op
= CC_OP_SUBB
+ ot
;
1990 gen_op_mulb_AL_T0();
1993 gen_op_mulw_AX_T0();
1997 gen_op_mull_EAX_T0();
2000 s
->cc_op
= CC_OP_MUL
;
2005 gen_op_imulb_AL_T0();
2008 gen_op_imulw_AX_T0();
2012 gen_op_imull_EAX_T0();
2015 s
->cc_op
= CC_OP_MUL
;
2020 gen_op_divb_AL_T0(pc_start
- s
->cs_base
);
2023 gen_op_divw_AX_T0(pc_start
- s
->cs_base
);
2027 gen_op_divl_EAX_T0(pc_start
- s
->cs_base
);
2034 gen_op_idivb_AL_T0(pc_start
- s
->cs_base
);
2037 gen_op_idivw_AX_T0(pc_start
- s
->cs_base
);
2041 gen_op_idivl_EAX_T0(pc_start
- s
->cs_base
);
2050 case 0xfe: /* GRP4 */
2051 case 0xff: /* GRP5 */
2055 ot
= dflag
? OT_LONG
: OT_WORD
;
2057 modrm
= ldub_code(s
->pc
++);
2058 mod
= (modrm
>> 6) & 3;
2060 op
= (modrm
>> 3) & 7;
2061 if (op
>= 2 && b
== 0xfe) {
2065 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2066 if (op
>= 2 && op
!= 3 && op
!= 5)
2067 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2069 gen_op_mov_TN_reg
[ot
][0][rm
]();
2073 case 0: /* inc Ev */
2078 gen_inc(s
, ot
, opreg
, 1);
2080 case 1: /* dec Ev */
2085 gen_inc(s
, ot
, opreg
, -1);
2087 case 2: /* call Ev */
2088 /* XXX: optimize if memory (no and is necessary) */
2090 gen_op_andl_T0_ffff();
2092 next_eip
= s
->pc
- s
->cs_base
;
2093 gen_op_movl_T0_im(next_eip
);
2097 case 3: /* lcall Ev */
2098 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2099 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2100 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2102 if (s
->pe
&& !s
->vm86
) {
2103 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2104 gen_op_set_cc_op(s
->cc_op
);
2105 gen_op_jmp_im(pc_start
- s
->cs_base
);
2106 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- s
->cs_base
);
2108 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
2112 case 4: /* jmp Ev */
2114 gen_op_andl_T0_ffff();
2118 case 5: /* ljmp Ev */
2119 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2120 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2121 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2123 if (s
->pe
&& !s
->vm86
) {
2124 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2125 gen_op_set_cc_op(s
->cc_op
);
2126 gen_op_jmp_im(pc_start
- s
->cs_base
);
2127 gen_op_ljmp_protected_T0_T1();
2129 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
2130 gen_op_movl_T0_T1();
2135 case 6: /* push Ev */
2143 case 0x84: /* test Ev, Gv */
2148 ot
= dflag
? OT_LONG
: OT_WORD
;
2150 modrm
= ldub_code(s
->pc
++);
2151 mod
= (modrm
>> 6) & 3;
2153 reg
= (modrm
>> 3) & 7;
2155 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2156 gen_op_mov_TN_reg
[ot
][1][reg
+ OR_EAX
]();
2157 gen_op_testl_T0_T1_cc();
2158 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2161 case 0xa8: /* test eAX, Iv */
2166 ot
= dflag
? OT_LONG
: OT_WORD
;
2167 val
= insn_get(s
, ot
);
2169 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
2170 gen_op_movl_T1_im(val
);
2171 gen_op_testl_T0_T1_cc();
2172 s
->cc_op
= CC_OP_LOGICB
+ ot
;
2175 case 0x98: /* CWDE/CBW */
2177 gen_op_movswl_EAX_AX();
2179 gen_op_movsbw_AX_AL();
2181 case 0x99: /* CDQ/CWD */
2183 gen_op_movslq_EDX_EAX();
2185 gen_op_movswl_DX_AX();
2187 case 0x1af: /* imul Gv, Ev */
2188 case 0x69: /* imul Gv, Ev, I */
2190 ot
= dflag
? OT_LONG
: OT_WORD
;
2191 modrm
= ldub_code(s
->pc
++);
2192 reg
= ((modrm
>> 3) & 7) + OR_EAX
;
2193 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2195 val
= insn_get(s
, ot
);
2196 gen_op_movl_T1_im(val
);
2197 } else if (b
== 0x6b) {
2198 val
= insn_get(s
, OT_BYTE
);
2199 gen_op_movl_T1_im(val
);
2201 gen_op_mov_TN_reg
[ot
][1][reg
]();
2204 if (ot
== OT_LONG
) {
2205 gen_op_imull_T0_T1();
2207 gen_op_imulw_T0_T1();
2209 gen_op_mov_reg_T0
[ot
][reg
]();
2210 s
->cc_op
= CC_OP_MUL
;
2213 case 0x1c1: /* xadd Ev, Gv */
2217 ot
= dflag
? OT_LONG
: OT_WORD
;
2218 modrm
= ldub_code(s
->pc
++);
2219 reg
= (modrm
>> 3) & 7;
2220 mod
= (modrm
>> 6) & 3;
2223 gen_op_mov_TN_reg
[ot
][0][reg
]();
2224 gen_op_mov_TN_reg
[ot
][1][rm
]();
2225 gen_op_addl_T0_T1();
2226 gen_op_mov_reg_T0
[ot
][rm
]();
2227 gen_op_mov_reg_T1
[ot
][reg
]();
2229 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2230 gen_op_mov_TN_reg
[ot
][0][reg
]();
2231 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2232 gen_op_addl_T0_T1();
2233 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2234 gen_op_mov_reg_T1
[ot
][reg
]();
2236 gen_op_update2_cc();
2237 s
->cc_op
= CC_OP_ADDB
+ ot
;
2240 case 0x1b1: /* cmpxchg Ev, Gv */
2244 ot
= dflag
? OT_LONG
: OT_WORD
;
2245 modrm
= ldub_code(s
->pc
++);
2246 reg
= (modrm
>> 3) & 7;
2247 mod
= (modrm
>> 6) & 3;
2248 gen_op_mov_TN_reg
[ot
][1][reg
]();
2251 gen_op_mov_TN_reg
[ot
][0][rm
]();
2252 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
2253 gen_op_mov_reg_T0
[ot
][rm
]();
2255 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2256 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2257 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
]();
2259 s
->cc_op
= CC_OP_SUBB
+ ot
;
2261 case 0x1c7: /* cmpxchg8b */
2262 modrm
= ldub_code(s
->pc
++);
2263 mod
= (modrm
>> 6) & 3;
2266 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2267 gen_op_set_cc_op(s
->cc_op
);
2268 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2270 s
->cc_op
= CC_OP_EFLAGS
;
2273 /**************************/
2275 case 0x50 ... 0x57: /* push */
2276 gen_op_mov_TN_reg
[OT_LONG
][0][b
& 7]();
2279 case 0x58 ... 0x5f: /* pop */
2280 ot
= dflag
? OT_LONG
: OT_WORD
;
2282 gen_op_mov_reg_T0
[ot
][b
& 7]();
2285 case 0x60: /* pusha */
2288 case 0x61: /* popa */
2291 case 0x68: /* push Iv */
2293 ot
= dflag
? OT_LONG
: OT_WORD
;
2295 val
= insn_get(s
, ot
);
2297 val
= (int8_t)insn_get(s
, OT_BYTE
);
2298 gen_op_movl_T0_im(val
);
2301 case 0x8f: /* pop Ev */
2302 ot
= dflag
? OT_LONG
: OT_WORD
;
2303 modrm
= ldub_code(s
->pc
++);
2305 s
->popl_esp_hack
= 2 << dflag
;
2306 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2307 s
->popl_esp_hack
= 0;
2310 case 0xc8: /* enter */
2313 val
= lduw_code(s
->pc
);
2315 level
= ldub_code(s
->pc
++);
2316 gen_enter(s
, val
, level
);
2319 case 0xc9: /* leave */
2320 /* XXX: exception not precise (ESP is updated before potential exception) */
2322 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2323 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
2325 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
2326 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
2329 ot
= dflag
? OT_LONG
: OT_WORD
;
2330 gen_op_mov_reg_T0
[ot
][R_EBP
]();
2333 case 0x06: /* push es */
2334 case 0x0e: /* push cs */
2335 case 0x16: /* push ss */
2336 case 0x1e: /* push ds */
2337 gen_op_movl_T0_seg(b
>> 3);
2340 case 0x1a0: /* push fs */
2341 case 0x1a8: /* push gs */
2342 gen_op_movl_T0_seg((b
>> 3) & 7);
2345 case 0x07: /* pop es */
2346 case 0x17: /* pop ss */
2347 case 0x1f: /* pop ds */
2350 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
2353 /* if reg == SS, inhibit interrupts/trace */
2354 gen_op_set_inhibit_irq();
2358 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2362 case 0x1a1: /* pop fs */
2363 case 0x1a9: /* pop gs */
2365 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
2368 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2373 /**************************/
2376 case 0x89: /* mov Gv, Ev */
2380 ot
= dflag
? OT_LONG
: OT_WORD
;
2381 modrm
= ldub_code(s
->pc
++);
2382 reg
= (modrm
>> 3) & 7;
2384 /* generate a generic store */
2385 gen_ldst_modrm(s
, modrm
, ot
, OR_EAX
+ reg
, 1);
2388 case 0xc7: /* mov Ev, Iv */
2392 ot
= dflag
? OT_LONG
: OT_WORD
;
2393 modrm
= ldub_code(s
->pc
++);
2394 mod
= (modrm
>> 6) & 3;
2396 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2397 val
= insn_get(s
, ot
);
2398 gen_op_movl_T0_im(val
);
2400 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2402 gen_op_mov_reg_T0
[ot
][modrm
& 7]();
2405 case 0x8b: /* mov Ev, Gv */
2409 ot
= dflag
? OT_LONG
: OT_WORD
;
2410 modrm
= ldub_code(s
->pc
++);
2411 reg
= (modrm
>> 3) & 7;
2413 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2414 gen_op_mov_reg_T0
[ot
][reg
]();
2416 case 0x8e: /* mov seg, Gv */
2417 modrm
= ldub_code(s
->pc
++);
2418 reg
= (modrm
>> 3) & 7;
2419 if (reg
>= 6 || reg
== R_CS
)
2421 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2422 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
2424 /* if reg == SS, inhibit interrupts/trace */
2425 gen_op_set_inhibit_irq();
2429 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2433 case 0x8c: /* mov Gv, seg */
2434 modrm
= ldub_code(s
->pc
++);
2435 reg
= (modrm
>> 3) & 7;
2436 mod
= (modrm
>> 6) & 3;
2439 gen_op_movl_T0_seg(reg
);
2441 if (mod
== 3 && dflag
)
2443 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
2446 case 0x1b6: /* movzbS Gv, Eb */
2447 case 0x1b7: /* movzwS Gv, Eb */
2448 case 0x1be: /* movsbS Gv, Eb */
2449 case 0x1bf: /* movswS Gv, Eb */
2452 /* d_ot is the size of destination */
2453 d_ot
= dflag
+ OT_WORD
;
2454 /* ot is the size of source */
2455 ot
= (b
& 1) + OT_BYTE
;
2456 modrm
= ldub_code(s
->pc
++);
2457 reg
= ((modrm
>> 3) & 7) + OR_EAX
;
2458 mod
= (modrm
>> 6) & 3;
2462 gen_op_mov_TN_reg
[ot
][0][rm
]();
2463 switch(ot
| (b
& 8)) {
2465 gen_op_movzbl_T0_T0();
2468 gen_op_movsbl_T0_T0();
2471 gen_op_movzwl_T0_T0();
2475 gen_op_movswl_T0_T0();
2478 gen_op_mov_reg_T0
[d_ot
][reg
]();
2480 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2482 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
2484 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
2486 gen_op_mov_reg_T0
[d_ot
][reg
]();
2491 case 0x8d: /* lea */
2492 ot
= dflag
? OT_LONG
: OT_WORD
;
2493 modrm
= ldub_code(s
->pc
++);
2494 reg
= (modrm
>> 3) & 7;
2495 /* we must ensure that no segment is added */
2499 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2501 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
2504 case 0xa0: /* mov EAX, Ov */
2506 case 0xa2: /* mov Ov, EAX */
2511 ot
= dflag
? OT_LONG
: OT_WORD
;
2513 offset_addr
= insn_get(s
, OT_LONG
);
2515 offset_addr
= insn_get(s
, OT_WORD
);
2516 gen_op_movl_A0_im(offset_addr
);
2517 /* handle override */
2519 int override
, must_add_seg
;
2520 must_add_seg
= s
->addseg
;
2521 if (s
->override
>= 0) {
2522 override
= s
->override
;
2528 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
2532 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2533 gen_op_mov_reg_T0
[ot
][R_EAX
]();
2535 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
2536 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2539 case 0xd7: /* xlat */
2540 gen_op_movl_A0_reg
[R_EBX
]();
2541 gen_op_addl_A0_AL();
2543 gen_op_andl_A0_ffff();
2544 /* handle override */
2546 int override
, must_add_seg
;
2547 must_add_seg
= s
->addseg
;
2549 if (s
->override
>= 0) {
2550 override
= s
->override
;
2556 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
2559 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
2560 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
2562 case 0xb0 ... 0xb7: /* mov R, Ib */
2563 val
= insn_get(s
, OT_BYTE
);
2564 gen_op_movl_T0_im(val
);
2565 gen_op_mov_reg_T0
[OT_BYTE
][b
& 7]();
2567 case 0xb8 ... 0xbf: /* mov R, Iv */
2568 ot
= dflag
? OT_LONG
: OT_WORD
;
2569 val
= insn_get(s
, ot
);
2570 reg
= OR_EAX
+ (b
& 7);
2571 gen_op_movl_T0_im(val
);
2572 gen_op_mov_reg_T0
[ot
][reg
]();
2575 case 0x91 ... 0x97: /* xchg R, EAX */
2576 ot
= dflag
? OT_LONG
: OT_WORD
;
2581 case 0x87: /* xchg Ev, Gv */
2585 ot
= dflag
? OT_LONG
: OT_WORD
;
2586 modrm
= ldub_code(s
->pc
++);
2587 reg
= (modrm
>> 3) & 7;
2588 mod
= (modrm
>> 6) & 3;
2592 gen_op_mov_TN_reg
[ot
][0][reg
]();
2593 gen_op_mov_TN_reg
[ot
][1][rm
]();
2594 gen_op_mov_reg_T0
[ot
][rm
]();
2595 gen_op_mov_reg_T1
[ot
][reg
]();
2597 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2598 gen_op_mov_TN_reg
[ot
][0][reg
]();
2599 /* for xchg, lock is implicit */
2600 if (!(prefixes
& PREFIX_LOCK
))
2602 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2603 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2604 if (!(prefixes
& PREFIX_LOCK
))
2606 gen_op_mov_reg_T1
[ot
][reg
]();
2609 case 0xc4: /* les Gv */
2612 case 0xc5: /* lds Gv */
2615 case 0x1b2: /* lss Gv */
2618 case 0x1b4: /* lfs Gv */
2621 case 0x1b5: /* lgs Gv */
2624 ot
= dflag
? OT_LONG
: OT_WORD
;
2625 modrm
= ldub_code(s
->pc
++);
2626 reg
= (modrm
>> 3) & 7;
2627 mod
= (modrm
>> 6) & 3;
2630 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2631 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
2632 gen_op_addl_A0_im(1 << (ot
- OT_WORD
+ 1));
2633 /* load the segment first to handle exceptions properly */
2634 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
2635 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
2636 /* then put the data */
2637 gen_op_mov_reg_T1
[ot
][reg
]();
2639 gen_op_jmp_im(s
->pc
- s
->cs_base
);
2644 /************************/
2655 ot
= dflag
? OT_LONG
: OT_WORD
;
2657 modrm
= ldub_code(s
->pc
++);
2658 mod
= (modrm
>> 6) & 3;
2660 op
= (modrm
>> 3) & 7;
2663 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2666 opreg
= rm
+ OR_EAX
;
2671 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
2674 shift
= ldub_code(s
->pc
++);
2676 gen_shifti(s
, op
, ot
, opreg
, shift
);
2691 case 0x1a4: /* shld imm */
2695 case 0x1a5: /* shld cl */
2699 case 0x1ac: /* shrd imm */
2703 case 0x1ad: /* shrd cl */
2707 ot
= dflag
? OT_LONG
: OT_WORD
;
2708 modrm
= ldub_code(s
->pc
++);
2709 mod
= (modrm
>> 6) & 3;
2711 reg
= (modrm
>> 3) & 7;
2714 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2715 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
2717 gen_op_mov_TN_reg
[ot
][0][rm
]();
2719 gen_op_mov_TN_reg
[ot
][1][reg
]();
2722 val
= ldub_code(s
->pc
++);
2726 gen_op_shiftd_T0_T1_im_cc
[ot
- OT_WORD
][op
](val
);
2728 gen_op_shiftd_mem_T0_T1_im_cc
[ot
- OT_WORD
][op
](val
);
2729 if (op
== 0 && ot
!= OT_WORD
)
2730 s
->cc_op
= CC_OP_SHLB
+ ot
;
2732 s
->cc_op
= CC_OP_SARB
+ ot
;
2735 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2736 gen_op_set_cc_op(s
->cc_op
);
2738 gen_op_shiftd_T0_T1_ECX_cc
[ot
- OT_WORD
][op
]();
2740 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
- OT_WORD
][op
]();
2741 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
2744 gen_op_mov_reg_T0
[ot
][rm
]();
2748 /************************/
2751 modrm
= ldub_code(s
->pc
++);
2752 mod
= (modrm
>> 6) & 3;
2754 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
2758 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2760 case 0x00 ... 0x07: /* fxxxs */
2761 case 0x10 ... 0x17: /* fixxxl */
2762 case 0x20 ... 0x27: /* fxxxl */
2763 case 0x30 ... 0x37: /* fixxx */
2770 gen_op_flds_FT0_A0();
2773 gen_op_fildl_FT0_A0();
2776 gen_op_fldl_FT0_A0();
2780 gen_op_fild_FT0_A0();
2784 gen_op_fp_arith_ST0_FT0
[op1
]();
2786 /* fcomp needs pop */
2791 case 0x08: /* flds */
2792 case 0x0a: /* fsts */
2793 case 0x0b: /* fstps */
2794 case 0x18: /* fildl */
2795 case 0x1a: /* fistl */
2796 case 0x1b: /* fistpl */
2797 case 0x28: /* fldl */
2798 case 0x2a: /* fstl */
2799 case 0x2b: /* fstpl */
2800 case 0x38: /* filds */
2801 case 0x3a: /* fists */
2802 case 0x3b: /* fistps */
2808 gen_op_flds_ST0_A0();
2811 gen_op_fildl_ST0_A0();
2814 gen_op_fldl_ST0_A0();
2818 gen_op_fild_ST0_A0();
2825 gen_op_fsts_ST0_A0();
2828 gen_op_fistl_ST0_A0();
2831 gen_op_fstl_ST0_A0();
2835 gen_op_fist_ST0_A0();
2843 case 0x0c: /* fldenv mem */
2844 gen_op_fldenv_A0(s
->dflag
);
2846 case 0x0d: /* fldcw mem */
2849 case 0x0e: /* fnstenv mem */
2850 gen_op_fnstenv_A0(s
->dflag
);
2852 case 0x0f: /* fnstcw mem */
2855 case 0x1d: /* fldt mem */
2856 gen_op_fldt_ST0_A0();
2858 case 0x1f: /* fstpt mem */
2859 gen_op_fstt_ST0_A0();
2862 case 0x2c: /* frstor mem */
2863 gen_op_frstor_A0(s
->dflag
);
2865 case 0x2e: /* fnsave mem */
2866 gen_op_fnsave_A0(s
->dflag
);
2868 case 0x2f: /* fnstsw mem */
2871 case 0x3c: /* fbld */
2872 gen_op_fbld_ST0_A0();
2874 case 0x3e: /* fbstp */
2875 gen_op_fbst_ST0_A0();
2878 case 0x3d: /* fildll */
2879 gen_op_fildll_ST0_A0();
2881 case 0x3f: /* fistpll */
2882 gen_op_fistll_ST0_A0();
2889 /* register float ops */
2893 case 0x08: /* fld sti */
2895 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
2897 case 0x09: /* fxchg sti */
2898 gen_op_fxchg_ST0_STN(opreg
);
2900 case 0x0a: /* grp d9/2 */
2908 case 0x0c: /* grp d9/4 */
2918 gen_op_fcom_ST0_FT0();
2927 case 0x0d: /* grp d9/5 */
2936 gen_op_fldl2t_ST0();
2940 gen_op_fldl2e_ST0();
2948 gen_op_fldlg2_ST0();
2952 gen_op_fldln2_ST0();
2963 case 0x0e: /* grp d9/6 */
2974 case 3: /* fpatan */
2977 case 4: /* fxtract */
2980 case 5: /* fprem1 */
2983 case 6: /* fdecstp */
2987 case 7: /* fincstp */
2992 case 0x0f: /* grp d9/7 */
2997 case 1: /* fyl2xp1 */
3003 case 3: /* fsincos */
3006 case 5: /* fscale */
3009 case 4: /* frndint */
3021 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
3022 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
3023 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
3029 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
3033 gen_op_fmov_FT0_STN(opreg
);
3034 gen_op_fp_arith_ST0_FT0
[op1
]();
3038 case 0x02: /* fcom */
3039 gen_op_fmov_FT0_STN(opreg
);
3040 gen_op_fcom_ST0_FT0();
3042 case 0x03: /* fcomp */
3043 gen_op_fmov_FT0_STN(opreg
);
3044 gen_op_fcom_ST0_FT0();
3047 case 0x15: /* da/5 */
3049 case 1: /* fucompp */
3050 gen_op_fmov_FT0_STN(1);
3051 gen_op_fucom_ST0_FT0();
3061 case 0: /* feni (287 only, just do nop here) */
3063 case 1: /* fdisi (287 only, just do nop here) */
3068 case 3: /* fninit */
3071 case 4: /* fsetpm (287 only, just do nop here) */
3077 case 0x1d: /* fucomi */
3078 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3079 gen_op_set_cc_op(s
->cc_op
);
3080 gen_op_fmov_FT0_STN(opreg
);
3081 gen_op_fucomi_ST0_FT0();
3082 s
->cc_op
= CC_OP_EFLAGS
;
3084 case 0x1e: /* fcomi */
3085 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3086 gen_op_set_cc_op(s
->cc_op
);
3087 gen_op_fmov_FT0_STN(opreg
);
3088 gen_op_fcomi_ST0_FT0();
3089 s
->cc_op
= CC_OP_EFLAGS
;
3091 case 0x2a: /* fst sti */
3092 gen_op_fmov_STN_ST0(opreg
);
3094 case 0x2b: /* fstp sti */
3095 gen_op_fmov_STN_ST0(opreg
);
3098 case 0x2c: /* fucom st(i) */
3099 gen_op_fmov_FT0_STN(opreg
);
3100 gen_op_fucom_ST0_FT0();
3102 case 0x2d: /* fucomp st(i) */
3103 gen_op_fmov_FT0_STN(opreg
);
3104 gen_op_fucom_ST0_FT0();
3107 case 0x33: /* de/3 */
3109 case 1: /* fcompp */
3110 gen_op_fmov_FT0_STN(1);
3111 gen_op_fcom_ST0_FT0();
3119 case 0x3c: /* df/4 */
3122 gen_op_fnstsw_EAX();
3128 case 0x3d: /* fucomip */
3129 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3130 gen_op_set_cc_op(s
->cc_op
);
3131 gen_op_fmov_FT0_STN(opreg
);
3132 gen_op_fucomi_ST0_FT0();
3134 s
->cc_op
= CC_OP_EFLAGS
;
3136 case 0x3e: /* fcomip */
3137 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3138 gen_op_set_cc_op(s
->cc_op
);
3139 gen_op_fmov_FT0_STN(opreg
);
3140 gen_op_fcomi_ST0_FT0();
3142 s
->cc_op
= CC_OP_EFLAGS
;
3149 /************************/
3152 case 0xa4: /* movsS */
3157 ot
= dflag
? OT_LONG
: OT_WORD
;
3159 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3160 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3166 case 0xaa: /* stosS */
3171 ot
= dflag
? OT_LONG
: OT_WORD
;
3173 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3174 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3179 case 0xac: /* lodsS */
3184 ot
= dflag
? OT_LONG
: OT_WORD
;
3185 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3186 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3191 case 0xae: /* scasS */
3196 ot
= dflag
? OT_LONG
: OT_WORD
;
3197 if (prefixes
& PREFIX_REPNZ
) {
3198 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
3199 } else if (prefixes
& PREFIX_REPZ
) {
3200 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
3203 s
->cc_op
= CC_OP_SUBB
+ ot
;
3207 case 0xa6: /* cmpsS */
3212 ot
= dflag
? OT_LONG
: OT_WORD
;
3213 if (prefixes
& PREFIX_REPNZ
) {
3214 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
3215 } else if (prefixes
& PREFIX_REPZ
) {
3216 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
3219 s
->cc_op
= CC_OP_SUBB
+ ot
;
3222 case 0x6c: /* insS */
3224 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
3225 /* NOTE: even for (E)CX = 0 the exception is raised */
3226 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3231 ot
= dflag
? OT_LONG
: OT_WORD
;
3232 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3233 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3239 case 0x6e: /* outsS */
3241 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
3242 /* NOTE: even for (E)CX = 0 the exception is raised */
3243 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3248 ot
= dflag
? OT_LONG
: OT_WORD
;
3249 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
3250 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3257 /************************/
3261 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
3262 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3267 ot
= dflag
? OT_LONG
: OT_WORD
;
3268 val
= ldub_code(s
->pc
++);
3269 gen_op_movl_T0_im(val
);
3271 gen_op_mov_reg_T1
[ot
][R_EAX
]();
3276 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
3277 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3282 ot
= dflag
? OT_LONG
: OT_WORD
;
3283 val
= ldub_code(s
->pc
++);
3284 gen_op_movl_T0_im(val
);
3285 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
3291 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
3292 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3297 ot
= dflag
? OT_LONG
: OT_WORD
;
3298 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
3300 gen_op_mov_reg_T1
[ot
][R_EAX
]();
3305 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
3306 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3311 ot
= dflag
? OT_LONG
: OT_WORD
;
3312 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
3313 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
3318 /************************/
3320 case 0xc2: /* ret im */
3321 val
= ldsw_code(s
->pc
);
3324 gen_stack_update(s
, val
+ (2 << s
->dflag
));
3326 gen_op_andl_T0_ffff();
3330 case 0xc3: /* ret */
3334 gen_op_andl_T0_ffff();
3338 case 0xca: /* lret im */
3339 val
= ldsw_code(s
->pc
);
3342 if (s
->pe
&& !s
->vm86
) {
3343 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3344 gen_op_set_cc_op(s
->cc_op
);
3345 gen_op_jmp_im(pc_start
- s
->cs_base
);
3346 gen_op_lret_protected(s
->dflag
, val
);
3350 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
3352 gen_op_andl_T0_ffff();
3353 /* NOTE: keeping EIP updated is not a problem in case of
3357 gen_op_addl_A0_im(2 << s
->dflag
);
3358 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
3359 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3360 /* add stack offset */
3361 gen_stack_update(s
, val
+ (4 << s
->dflag
));
3365 case 0xcb: /* lret */
3368 case 0xcf: /* iret */
3371 gen_op_iret_real(s
->dflag
);
3372 s
->cc_op
= CC_OP_EFLAGS
;
3373 } else if (s
->vm86
&& s
->iopl
!= 3) {
3374 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3376 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3377 gen_op_set_cc_op(s
->cc_op
);
3378 gen_op_jmp_im(pc_start
- s
->cs_base
);
3379 gen_op_iret_protected(s
->dflag
);
3380 s
->cc_op
= CC_OP_EFLAGS
;
3384 case 0xe8: /* call im */
3386 unsigned int next_eip
;
3387 ot
= dflag
? OT_LONG
: OT_WORD
;
3388 val
= insn_get(s
, ot
);
3389 next_eip
= s
->pc
- s
->cs_base
;
3393 gen_op_movl_T0_im(next_eip
);
3398 case 0x9a: /* lcall im */
3400 unsigned int selector
, offset
;
3402 ot
= dflag
? OT_LONG
: OT_WORD
;
3403 offset
= insn_get(s
, ot
);
3404 selector
= insn_get(s
, OT_WORD
);
3406 gen_op_movl_T0_im(selector
);
3407 gen_op_movl_T1_im(offset
);
3410 case 0xe9: /* jmp */
3411 ot
= dflag
? OT_LONG
: OT_WORD
;
3412 val
= insn_get(s
, ot
);
3413 val
+= s
->pc
- s
->cs_base
;
3418 case 0xea: /* ljmp im */
3420 unsigned int selector
, offset
;
3422 ot
= dflag
? OT_LONG
: OT_WORD
;
3423 offset
= insn_get(s
, ot
);
3424 selector
= insn_get(s
, OT_WORD
);
3426 gen_op_movl_T0_im(selector
);
3427 gen_op_movl_T1_im(offset
);
3430 case 0xeb: /* jmp Jb */
3431 val
= (int8_t)insn_get(s
, OT_BYTE
);
3432 val
+= s
->pc
- s
->cs_base
;
3437 case 0x70 ... 0x7f: /* jcc Jb */
3438 val
= (int8_t)insn_get(s
, OT_BYTE
);
3440 case 0x180 ... 0x18f: /* jcc Jv */
3442 val
= insn_get(s
, OT_LONG
);
3444 val
= (int16_t)insn_get(s
, OT_WORD
);
3447 next_eip
= s
->pc
- s
->cs_base
;
3451 gen_jcc(s
, b
, val
, next_eip
);
3454 case 0x190 ... 0x19f: /* setcc Gv */
3455 modrm
= ldub_code(s
->pc
++);
3457 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
3459 case 0x140 ... 0x14f: /* cmov Gv, Ev */
3460 ot
= dflag
? OT_LONG
: OT_WORD
;
3461 modrm
= ldub_code(s
->pc
++);
3462 reg
= (modrm
>> 3) & 7;
3463 mod
= (modrm
>> 6) & 3;
3466 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3467 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3470 gen_op_mov_TN_reg
[ot
][1][rm
]();
3472 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
3475 /************************/
3477 case 0x9c: /* pushf */
3478 if (s
->vm86
&& s
->iopl
!= 3) {
3479 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3481 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3482 gen_op_set_cc_op(s
->cc_op
);
3483 gen_op_movl_T0_eflags();
3487 case 0x9d: /* popf */
3488 if (s
->vm86
&& s
->iopl
!= 3) {
3489 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3494 gen_op_movl_eflags_T0_cpl0();
3496 gen_op_movw_eflags_T0_cpl0();
3500 gen_op_movl_eflags_T0();
3502 gen_op_movw_eflags_T0();
3506 s
->cc_op
= CC_OP_EFLAGS
;
3507 /* abort translation because TF flag may change */
3508 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3512 case 0x9e: /* sahf */
3513 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
3514 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3515 gen_op_set_cc_op(s
->cc_op
);
3516 gen_op_movb_eflags_T0();
3517 s
->cc_op
= CC_OP_EFLAGS
;
3519 case 0x9f: /* lahf */
3520 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3521 gen_op_set_cc_op(s
->cc_op
);
3522 gen_op_movl_T0_eflags();
3523 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
3525 case 0xf5: /* cmc */
3526 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3527 gen_op_set_cc_op(s
->cc_op
);
3529 s
->cc_op
= CC_OP_EFLAGS
;
3531 case 0xf8: /* clc */
3532 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3533 gen_op_set_cc_op(s
->cc_op
);
3535 s
->cc_op
= CC_OP_EFLAGS
;
3537 case 0xf9: /* stc */
3538 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3539 gen_op_set_cc_op(s
->cc_op
);
3541 s
->cc_op
= CC_OP_EFLAGS
;
3543 case 0xfc: /* cld */
3546 case 0xfd: /* std */
3550 /************************/
3551 /* bit operations */
3552 case 0x1ba: /* bt/bts/btr/btc Gv, im */
3553 ot
= dflag
? OT_LONG
: OT_WORD
;
3554 modrm
= ldub_code(s
->pc
++);
3555 op
= (modrm
>> 3) & 7;
3556 mod
= (modrm
>> 6) & 3;
3559 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3560 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3562 gen_op_mov_TN_reg
[ot
][0][rm
]();
3565 val
= ldub_code(s
->pc
++);
3566 gen_op_movl_T1_im(val
);
3570 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
3571 s
->cc_op
= CC_OP_SARB
+ ot
;
3574 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3576 gen_op_mov_reg_T0
[ot
][rm
]();
3577 gen_op_update_bt_cc();
3580 case 0x1a3: /* bt Gv, Ev */
3583 case 0x1ab: /* bts */
3586 case 0x1b3: /* btr */
3589 case 0x1bb: /* btc */
3592 ot
= dflag
? OT_LONG
: OT_WORD
;
3593 modrm
= ldub_code(s
->pc
++);
3594 reg
= (modrm
>> 3) & 7;
3595 mod
= (modrm
>> 6) & 3;
3597 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
3599 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3600 /* specific case: we need to add a displacement */
3602 gen_op_add_bitw_A0_T1();
3604 gen_op_add_bitl_A0_T1();
3605 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3607 gen_op_mov_TN_reg
[ot
][0][rm
]();
3609 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
3610 s
->cc_op
= CC_OP_SARB
+ ot
;
3613 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3615 gen_op_mov_reg_T0
[ot
][rm
]();
3616 gen_op_update_bt_cc();
3619 case 0x1bc: /* bsf */
3620 case 0x1bd: /* bsr */
3621 ot
= dflag
? OT_LONG
: OT_WORD
;
3622 modrm
= ldub_code(s
->pc
++);
3623 reg
= (modrm
>> 3) & 7;
3624 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3625 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
3626 /* NOTE: we always write back the result. Intel doc says it is
3627 undefined if T0 == 0 */
3628 gen_op_mov_reg_T0
[ot
][reg
]();
3629 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3631 /************************/
3633 case 0x27: /* daa */
3634 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3635 gen_op_set_cc_op(s
->cc_op
);
3637 s
->cc_op
= CC_OP_EFLAGS
;
3639 case 0x2f: /* das */
3640 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3641 gen_op_set_cc_op(s
->cc_op
);
3643 s
->cc_op
= CC_OP_EFLAGS
;
3645 case 0x37: /* aaa */
3646 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3647 gen_op_set_cc_op(s
->cc_op
);
3649 s
->cc_op
= CC_OP_EFLAGS
;
3651 case 0x3f: /* aas */
3652 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3653 gen_op_set_cc_op(s
->cc_op
);
3655 s
->cc_op
= CC_OP_EFLAGS
;
3657 case 0xd4: /* aam */
3658 val
= ldub_code(s
->pc
++);
3660 s
->cc_op
= CC_OP_LOGICB
;
3662 case 0xd5: /* aad */
3663 val
= ldub_code(s
->pc
++);
3665 s
->cc_op
= CC_OP_LOGICB
;
3667 /************************/
3669 case 0x90: /* nop */
3671 case 0x9b: /* fwait */
3673 case 0xcc: /* int3 */
3674 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3676 case 0xcd: /* int N */
3677 val
= ldub_code(s
->pc
++);
3678 /* XXX: add error code for vm86 GPF */
3680 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
3682 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3684 case 0xce: /* into */
3685 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3686 gen_op_set_cc_op(s
->cc_op
);
3687 gen_op_into(s
->pc
- s
->cs_base
);
3689 case 0xf1: /* icebp (undocumented, exits to external debugger) */
3690 gen_debug(s
, pc_start
- s
->cs_base
);
3692 case 0xfa: /* cli */
3694 if (s
->cpl
<= s
->iopl
) {
3697 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3703 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3707 case 0xfb: /* sti */
3709 if (s
->cpl
<= s
->iopl
) {
3712 /* interruptions are enabled only the first insn after sti */
3713 gen_op_set_inhibit_irq();
3714 /* give a chance to handle pending irqs */
3715 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3718 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3724 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3728 case 0x62: /* bound */
3729 ot
= dflag
? OT_LONG
: OT_WORD
;
3730 modrm
= ldub_code(s
->pc
++);
3731 reg
= (modrm
>> 3) & 7;
3732 mod
= (modrm
>> 6) & 3;
3735 gen_op_mov_reg_T0
[ot
][reg
]();
3736 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3738 gen_op_boundw(pc_start
- s
->cs_base
);
3740 gen_op_boundl(pc_start
- s
->cs_base
);
3742 case 0x1c8 ... 0x1cf: /* bswap reg */
3744 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
3746 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
3748 case 0xd6: /* salc */
3749 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3750 gen_op_set_cc_op(s
->cc_op
);
3753 case 0xe0: /* loopnz */
3754 case 0xe1: /* loopz */
3755 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3756 gen_op_set_cc_op(s
->cc_op
);
3758 case 0xe2: /* loop */
3759 case 0xe3: /* jecxz */
3760 val
= (int8_t)insn_get(s
, OT_BYTE
);
3761 next_eip
= s
->pc
- s
->cs_base
;
3765 gen_op_loop
[s
->aflag
][b
& 3](val
, next_eip
);
3768 case 0x130: /* wrmsr */
3769 case 0x132: /* rdmsr */
3771 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3779 case 0x131: /* rdtsc */
3782 case 0x1a2: /* cpuid */
3785 case 0xf4: /* hlt */
3787 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3789 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3790 gen_op_set_cc_op(s
->cc_op
);
3791 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3797 modrm
= ldub_code(s
->pc
++);
3798 mod
= (modrm
>> 6) & 3;
3799 op
= (modrm
>> 3) & 7;
3802 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
3806 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3810 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3812 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3813 gen_op_jmp_im(pc_start
- s
->cs_base
);
3818 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
3822 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3826 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3828 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3829 gen_op_jmp_im(pc_start
- s
->cs_base
);
3840 modrm
= ldub_code(s
->pc
++);
3841 mod
= (modrm
>> 6) & 3;
3842 op
= (modrm
>> 3) & 7;
3848 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3850 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.limit
));
3852 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.limit
));
3853 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
3854 gen_op_addl_A0_im(2);
3856 gen_op_movl_T0_env(offsetof(CPUX86State
,gdt
.base
));
3858 gen_op_movl_T0_env(offsetof(CPUX86State
,idt
.base
));
3860 gen_op_andl_T0_im(0xffffff);
3861 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
3868 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3870 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3871 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
3872 gen_op_addl_A0_im(2);
3873 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
3875 gen_op_andl_T0_im(0xffffff);
3877 gen_op_movl_env_T0(offsetof(CPUX86State
,gdt
.base
));
3878 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
3880 gen_op_movl_env_T0(offsetof(CPUX86State
,idt
.base
));
3881 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
3886 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
3887 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
3891 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3893 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3897 case 7: /* invlpg */
3899 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3903 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3911 case 0x102: /* lar */
3912 case 0x103: /* lsl */
3913 if (!s
->pe
|| s
->vm86
)
3915 ot
= dflag
? OT_LONG
: OT_WORD
;
3916 modrm
= ldub_code(s
->pc
++);
3917 reg
= (modrm
>> 3) & 7;
3918 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3919 gen_op_mov_TN_reg
[ot
][1][reg
]();
3920 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3921 gen_op_set_cc_op(s
->cc_op
);
3926 s
->cc_op
= CC_OP_EFLAGS
;
3927 gen_op_mov_reg_T1
[ot
][reg
]();
3930 modrm
= ldub_code(s
->pc
++);
3931 mod
= (modrm
>> 6) & 3;
3932 op
= (modrm
>> 3) & 7;
3934 case 0: /* prefetchnta */
3935 case 1: /* prefetchnt0 */
3936 case 2: /* prefetchnt0 */
3937 case 3: /* prefetchnt0 */
3940 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3941 /* nothing more to do */
3947 case 0x120: /* mov reg, crN */
3948 case 0x122: /* mov crN, reg */
3950 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3952 modrm
= ldub_code(s
->pc
++);
3953 if ((modrm
& 0xc0) != 0xc0)
3956 reg
= (modrm
>> 3) & 7;
3963 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
3964 gen_op_movl_crN_T0(reg
);
3965 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3968 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
3969 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
3977 case 0x121: /* mov reg, drN */
3978 case 0x123: /* mov drN, reg */
3980 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
3982 modrm
= ldub_code(s
->pc
++);
3983 if ((modrm
& 0xc0) != 0xc0)
3986 reg
= (modrm
>> 3) & 7;
3987 /* XXX: do it dynamically with CR4.DE bit */
3988 if (reg
== 4 || reg
== 5)
3991 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
3992 gen_op_movl_drN_T0(reg
);
3993 gen_op_jmp_im(s
->pc
- s
->cs_base
);
3996 gen_op_movl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
3997 gen_op_mov_reg_T0
[OT_LONG
][rm
]();
4001 case 0x106: /* clts */
4003 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4011 /* lock generation */
4012 if (s
->prefix
& PREFIX_LOCK
)
4016 /* XXX: ensure that no lock was generated */
4017 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
/* Convenience masks over the individual x86 condition-code bits:
   OSZAPC = all six arithmetic flags, OSZAP = all but carry. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
4024 /* flags read by an operation */
4025 static uint16_t opc_read_flags
[NB_OPS
] = {
4026 [INDEX_op_aas
] = CC_A
,
4027 [INDEX_op_aaa
] = CC_A
,
4028 [INDEX_op_das
] = CC_A
| CC_C
,
4029 [INDEX_op_daa
] = CC_A
| CC_C
,
4031 [INDEX_op_adcb_T0_T1_cc
] = CC_C
,
4032 [INDEX_op_adcw_T0_T1_cc
] = CC_C
,
4033 [INDEX_op_adcl_T0_T1_cc
] = CC_C
,
4034 [INDEX_op_sbbb_T0_T1_cc
] = CC_C
,
4035 [INDEX_op_sbbw_T0_T1_cc
] = CC_C
,
4036 [INDEX_op_sbbl_T0_T1_cc
] = CC_C
,
4038 [INDEX_op_adcb_mem_T0_T1_cc
] = CC_C
,
4039 [INDEX_op_adcw_mem_T0_T1_cc
] = CC_C
,
4040 [INDEX_op_adcl_mem_T0_T1_cc
] = CC_C
,
4041 [INDEX_op_sbbb_mem_T0_T1_cc
] = CC_C
,
4042 [INDEX_op_sbbw_mem_T0_T1_cc
] = CC_C
,
4043 [INDEX_op_sbbl_mem_T0_T1_cc
] = CC_C
,
4045 /* subtle: due to the incl/decl implementation, C is used */
4046 [INDEX_op_update_inc_cc
] = CC_C
,
4048 [INDEX_op_into
] = CC_O
,
4050 [INDEX_op_jb_subb
] = CC_C
,
4051 [INDEX_op_jb_subw
] = CC_C
,
4052 [INDEX_op_jb_subl
] = CC_C
,
4054 [INDEX_op_jz_subb
] = CC_Z
,
4055 [INDEX_op_jz_subw
] = CC_Z
,
4056 [INDEX_op_jz_subl
] = CC_Z
,
4058 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
4059 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
4060 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
4062 [INDEX_op_js_subb
] = CC_S
,
4063 [INDEX_op_js_subw
] = CC_S
,
4064 [INDEX_op_js_subl
] = CC_S
,
4066 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
4067 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
4068 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
4070 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
4071 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
4072 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
4074 [INDEX_op_loopnzw
] = CC_Z
,
4075 [INDEX_op_loopnzl
] = CC_Z
,
4076 [INDEX_op_loopzw
] = CC_Z
,
4077 [INDEX_op_loopzl
] = CC_Z
,
4079 [INDEX_op_seto_T0_cc
] = CC_O
,
4080 [INDEX_op_setb_T0_cc
] = CC_C
,
4081 [INDEX_op_setz_T0_cc
] = CC_Z
,
4082 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
4083 [INDEX_op_sets_T0_cc
] = CC_S
,
4084 [INDEX_op_setp_T0_cc
] = CC_P
,
4085 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
4086 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
4088 [INDEX_op_setb_T0_subb
] = CC_C
,
4089 [INDEX_op_setb_T0_subw
] = CC_C
,
4090 [INDEX_op_setb_T0_subl
] = CC_C
,
4092 [INDEX_op_setz_T0_subb
] = CC_Z
,
4093 [INDEX_op_setz_T0_subw
] = CC_Z
,
4094 [INDEX_op_setz_T0_subl
] = CC_Z
,
4096 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
4097 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
4098 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
4100 [INDEX_op_sets_T0_subb
] = CC_S
,
4101 [INDEX_op_sets_T0_subw
] = CC_S
,
4102 [INDEX_op_sets_T0_subl
] = CC_S
,
4104 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
4105 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
4106 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
4108 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
4109 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
4110 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
4112 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
4113 [INDEX_op_cmc
] = CC_C
,
4114 [INDEX_op_salc
] = CC_C
,
4116 [INDEX_op_rclb_T0_T1_cc
] = CC_C
,
4117 [INDEX_op_rclw_T0_T1_cc
] = CC_C
,
4118 [INDEX_op_rcll_T0_T1_cc
] = CC_C
,
4119 [INDEX_op_rcrb_T0_T1_cc
] = CC_C
,
4120 [INDEX_op_rcrw_T0_T1_cc
] = CC_C
,
4121 [INDEX_op_rcrl_T0_T1_cc
] = CC_C
,
4123 [INDEX_op_rclb_mem_T0_T1_cc
] = CC_C
,
4124 [INDEX_op_rclw_mem_T0_T1_cc
] = CC_C
,
4125 [INDEX_op_rcll_mem_T0_T1_cc
] = CC_C
,
4126 [INDEX_op_rcrb_mem_T0_T1_cc
] = CC_C
,
4127 [INDEX_op_rcrw_mem_T0_T1_cc
] = CC_C
,
4128 [INDEX_op_rcrl_mem_T0_T1_cc
] = CC_C
,
4131 /* flags written by an operation */
4132 static uint16_t opc_write_flags
[NB_OPS
] = {
4133 [INDEX_op_update2_cc
] = CC_OSZAPC
,
4134 [INDEX_op_update1_cc
] = CC_OSZAPC
,
4135 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
4136 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
4137 /* subtle: due to the incl/decl implementation, C is used */
4138 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
4139 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
4141 [INDEX_op_adcb_T0_T1_cc
] = CC_OSZAPC
,
4142 [INDEX_op_adcw_T0_T1_cc
] = CC_OSZAPC
,
4143 [INDEX_op_adcl_T0_T1_cc
] = CC_OSZAPC
,
4144 [INDEX_op_sbbb_T0_T1_cc
] = CC_OSZAPC
,
4145 [INDEX_op_sbbw_T0_T1_cc
] = CC_OSZAPC
,
4146 [INDEX_op_sbbl_T0_T1_cc
] = CC_OSZAPC
,
4148 [INDEX_op_adcb_mem_T0_T1_cc
] = CC_OSZAPC
,
4149 [INDEX_op_adcw_mem_T0_T1_cc
] = CC_OSZAPC
,
4150 [INDEX_op_adcl_mem_T0_T1_cc
] = CC_OSZAPC
,
4151 [INDEX_op_sbbb_mem_T0_T1_cc
] = CC_OSZAPC
,
4152 [INDEX_op_sbbw_mem_T0_T1_cc
] = CC_OSZAPC
,
4153 [INDEX_op_sbbl_mem_T0_T1_cc
] = CC_OSZAPC
,
4155 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
4156 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
4157 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
4158 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
4159 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
4160 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
4161 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
4162 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
4165 [INDEX_op_aam
] = CC_OSZAPC
,
4166 [INDEX_op_aad
] = CC_OSZAPC
,
4167 [INDEX_op_aas
] = CC_OSZAPC
,
4168 [INDEX_op_aaa
] = CC_OSZAPC
,
4169 [INDEX_op_das
] = CC_OSZAPC
,
4170 [INDEX_op_daa
] = CC_OSZAPC
,
4172 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
4173 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
4174 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
4175 [INDEX_op_clc
] = CC_C
,
4176 [INDEX_op_stc
] = CC_C
,
4177 [INDEX_op_cmc
] = CC_C
,
4179 [INDEX_op_rolb_T0_T1_cc
] = CC_O
| CC_C
,
4180 [INDEX_op_rolw_T0_T1_cc
] = CC_O
| CC_C
,
4181 [INDEX_op_roll_T0_T1_cc
] = CC_O
| CC_C
,
4182 [INDEX_op_rorb_T0_T1_cc
] = CC_O
| CC_C
,
4183 [INDEX_op_rorw_T0_T1_cc
] = CC_O
| CC_C
,
4184 [INDEX_op_rorl_T0_T1_cc
] = CC_O
| CC_C
,
4186 [INDEX_op_rclb_T0_T1_cc
] = CC_O
| CC_C
,
4187 [INDEX_op_rclw_T0_T1_cc
] = CC_O
| CC_C
,
4188 [INDEX_op_rcll_T0_T1_cc
] = CC_O
| CC_C
,
4189 [INDEX_op_rcrb_T0_T1_cc
] = CC_O
| CC_C
,
4190 [INDEX_op_rcrw_T0_T1_cc
] = CC_O
| CC_C
,
4191 [INDEX_op_rcrl_T0_T1_cc
] = CC_O
| CC_C
,
4193 [INDEX_op_shlb_T0_T1_cc
] = CC_OSZAPC
,
4194 [INDEX_op_shlw_T0_T1_cc
] = CC_OSZAPC
,
4195 [INDEX_op_shll_T0_T1_cc
] = CC_OSZAPC
,
4197 [INDEX_op_shrb_T0_T1_cc
] = CC_OSZAPC
,
4198 [INDEX_op_shrw_T0_T1_cc
] = CC_OSZAPC
,
4199 [INDEX_op_shrl_T0_T1_cc
] = CC_OSZAPC
,
4201 [INDEX_op_sarb_T0_T1_cc
] = CC_OSZAPC
,
4202 [INDEX_op_sarw_T0_T1_cc
] = CC_OSZAPC
,
4203 [INDEX_op_sarl_T0_T1_cc
] = CC_OSZAPC
,
4205 [INDEX_op_shldw_T0_T1_ECX_cc
] = CC_OSZAPC
,
4206 [INDEX_op_shldl_T0_T1_ECX_cc
] = CC_OSZAPC
,
4207 [INDEX_op_shldw_T0_T1_im_cc
] = CC_OSZAPC
,
4208 [INDEX_op_shldl_T0_T1_im_cc
] = CC_OSZAPC
,
4210 [INDEX_op_shrdw_T0_T1_ECX_cc
] = CC_OSZAPC
,
4211 [INDEX_op_shrdl_T0_T1_ECX_cc
] = CC_OSZAPC
,
4212 [INDEX_op_shrdw_T0_T1_im_cc
] = CC_OSZAPC
,
4213 [INDEX_op_shrdl_T0_T1_im_cc
] = CC_OSZAPC
,
4215 [INDEX_op_rolb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4216 [INDEX_op_rolw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4217 [INDEX_op_roll_mem_T0_T1_cc
] = CC_O
| CC_C
,
4218 [INDEX_op_rorb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4219 [INDEX_op_rorw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4220 [INDEX_op_rorl_mem_T0_T1_cc
] = CC_O
| CC_C
,
4222 [INDEX_op_rclb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4223 [INDEX_op_rclw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4224 [INDEX_op_rcll_mem_T0_T1_cc
] = CC_O
| CC_C
,
4225 [INDEX_op_rcrb_mem_T0_T1_cc
] = CC_O
| CC_C
,
4226 [INDEX_op_rcrw_mem_T0_T1_cc
] = CC_O
| CC_C
,
4227 [INDEX_op_rcrl_mem_T0_T1_cc
] = CC_O
| CC_C
,
4229 [INDEX_op_shlb_mem_T0_T1_cc
] = CC_OSZAPC
,
4230 [INDEX_op_shlw_mem_T0_T1_cc
] = CC_OSZAPC
,
4231 [INDEX_op_shll_mem_T0_T1_cc
] = CC_OSZAPC
,
4233 [INDEX_op_shrb_mem_T0_T1_cc
] = CC_OSZAPC
,
4234 [INDEX_op_shrw_mem_T0_T1_cc
] = CC_OSZAPC
,
4235 [INDEX_op_shrl_mem_T0_T1_cc
] = CC_OSZAPC
,
4237 [INDEX_op_sarb_mem_T0_T1_cc
] = CC_OSZAPC
,
4238 [INDEX_op_sarw_mem_T0_T1_cc
] = CC_OSZAPC
,
4239 [INDEX_op_sarl_mem_T0_T1_cc
] = CC_OSZAPC
,
4241 [INDEX_op_shldw_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4242 [INDEX_op_shldl_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4243 [INDEX_op_shldw_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4244 [INDEX_op_shldl_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4246 [INDEX_op_shrdw_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4247 [INDEX_op_shrdl_mem_T0_T1_ECX_cc
] = CC_OSZAPC
,
4248 [INDEX_op_shrdw_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4249 [INDEX_op_shrdl_mem_T0_T1_im_cc
] = CC_OSZAPC
,
4251 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
4252 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
4253 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
4254 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
4255 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
4256 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
4257 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
4258 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
4260 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
4261 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
4262 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
4263 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
4265 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
4266 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
4267 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
4269 [INDEX_op_cmpxchgb_mem_T0_T1_EAX_cc
] = CC_OSZAPC
,
4270 [INDEX_op_cmpxchgw_mem_T0_T1_EAX_cc
] = CC_OSZAPC
,
4271 [INDEX_op_cmpxchgl_mem_T0_T1_EAX_cc
] = CC_OSZAPC
,
4273 [INDEX_op_cmpxchg8b
] = CC_Z
,
4274 [INDEX_op_lar
] = CC_Z
,
4275 [INDEX_op_lsl
] = CC_Z
,
4276 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
4277 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
4280 /* simpler form of an operation if no flags need to be generated */
4281 static uint16_t opc_simpler
[NB_OPS
] = {
4282 [INDEX_op_update2_cc
] = INDEX_op_nop
,
4283 [INDEX_op_update1_cc
] = INDEX_op_nop
,
4284 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
4286 /* broken: CC_OP logic must be rewritten */
4287 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
4289 [INDEX_op_rolb_T0_T1_cc
] = INDEX_op_rolb_T0_T1
,
4290 [INDEX_op_rolw_T0_T1_cc
] = INDEX_op_rolw_T0_T1
,
4291 [INDEX_op_roll_T0_T1_cc
] = INDEX_op_roll_T0_T1
,
4293 [INDEX_op_rorb_T0_T1_cc
] = INDEX_op_rorb_T0_T1
,
4294 [INDEX_op_rorw_T0_T1_cc
] = INDEX_op_rorw_T0_T1
,
4295 [INDEX_op_rorl_T0_T1_cc
] = INDEX_op_rorl_T0_T1
,
4297 [INDEX_op_rolb_mem_T0_T1_cc
] = INDEX_op_rolb_mem_T0_T1
,
4298 [INDEX_op_rolw_mem_T0_T1_cc
] = INDEX_op_rolw_mem_T0_T1
,
4299 [INDEX_op_roll_mem_T0_T1_cc
] = INDEX_op_roll_mem_T0_T1
,
4301 [INDEX_op_rorb_mem_T0_T1_cc
] = INDEX_op_rorb_mem_T0_T1
,
4302 [INDEX_op_rorw_mem_T0_T1_cc
] = INDEX_op_rorw_mem_T0_T1
,
4303 [INDEX_op_rorl_mem_T0_T1_cc
] = INDEX_op_rorl_mem_T0_T1
,
4305 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
4306 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
4307 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
4309 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
4310 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
4311 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
4313 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
4314 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
4315 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
4318 void optimize_flags_init(void)
4321 /* put default values in arrays */
4322 for(i
= 0; i
< NB_OPS
; i
++) {
4323 if (opc_simpler
[i
] == 0)
4328 /* CPU flags computation optimization: we move backward thru the
4329 generated code to see which flags are needed. The operation is
4330 modified if suitable */
4331 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
4334 int live_flags
, write_flags
, op
;
4336 opc_ptr
= opc_buf
+ opc_buf_len
;
4337 /* live_flags contains the flags needed by the next instructions
4338 in the code. At the end of the bloc, we consider that all the
4340 live_flags
= CC_OSZAPC
;
4341 while (opc_ptr
> opc_buf
) {
4343 /* if none of the flags written by the instruction is used,
4344 then we can try to find a simpler instruction */
4345 write_flags
= opc_write_flags
[op
];
4346 if ((live_flags
& write_flags
) == 0) {
4347 *opc_ptr
= opc_simpler
[op
];
4349 /* compute the live flags before the instruction */
4350 live_flags
&= ~write_flags
;
4351 live_flags
|= opc_read_flags
[op
];
/* NOTE(review): this whole region is garbled — statements are split
   across physical lines and stray decimal source-line numbers are
   embedded in the text, and several structural lines (braces,
   declarations, loop headers) appear to be missing.  The code below is
   kept byte-identical; only comments are added.  Recover the exact body
   from version control before attempting any behavioral change. */
4355 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
4356 basic block 'tb'. If search_pc is TRUE, also generate PC
4357 information for each intermediate instruction. */
4358 static inline int gen_intermediate_code_internal(CPUState
*env
,
4359 TranslationBlock
*tb
,
/* dc is a stack-allocated disassembly context for this one block */
4362 DisasContext dc1
, *dc
= &dc1
;
4364 uint16_t *gen_opc_end
;
4369 /* generate intermediate code */
/* block start and CS base come from the TranslationBlock */
4370 pc_start
= (uint8_t *)tb
->pc
;
4371 cs_base
= (uint8_t *)tb
->cs_base
;
/* populate the DisasContext from CR0 and the mode bits packed in
   'flags' (shift/mask constants HF_*/VM_/IOPL_/TF_SHIFT) */
4374 dc
->pe
= env
->cr
[0] & CR0_PE_MASK
;
4375 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
4376 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
4377 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
4379 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
4380 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
4381 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
4382 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
4383 dc
->singlestep_enabled
= env
->singlestep_enabled
;
4384 dc
->cc_op
= CC_OP_DYNAMIC
;
4385 dc
->cs_base
= cs_base
;
4387 dc
->popl_esp_hack
= 0;
4388 /* select memory access functions */
4390 if (flags
& HF_SOFTMMU_MASK
) {
/* direct block chaining only when neither TF, single-stepping nor
   (without soft MMU) softmmu mode forces per-insn control returns */
4396 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
4397 #ifndef CONFIG_SOFT_MMU
4398 || (flags
& HF_SOFTMMU_MASK
)
/* reset the intermediate-op output pointers for this block */
4401 gen_opc_ptr
= gen_opc_buf
;
4402 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4403 gen_opparam_ptr
= gen_opparam_buf
;
4405 dc
->is_jmp
= DISAS_NEXT
;
4409 /* if irq were inhibited for the next instruction, we can disable
4410 them here as it is simpler (otherwise jumps would have to
4411 handled as special case) */
4412 if (flags
& HF_INHIBIT_IRQ_MASK
) {
4413 gen_op_reset_inhibit_irq();
/* scan active breakpoints; emit a debug trap when one matches the
   current translation PC */
4416 if (env
->nb_breakpoints
> 0) {
4417 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4418 if (env
->breakpoints
[j
] == (unsigned long)pc_ptr
) {
4419 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
/* record per-op PC/cc_op info (search_pc path — assumed; the guarding
   conditional is not visible here) */
4425 j
= gen_opc_ptr
- gen_opc_buf
;
4429 gen_opc_instr_start
[lj
++] = 0;
4431 gen_opc_pc
[lj
] = (uint32_t)pc_ptr
;
4432 gen_opc_cc_op
[lj
] = dc
->cc_op
;
4433 gen_opc_instr_start
[lj
] = 1;
/* translate one guest instruction; disas_insn returns the next PC */
4435 pc_ptr
= disas_insn(dc
, pc_ptr
);
4436 /* stop translation if indicated */
4439 /* if single step mode, we generate only one instruction and
4440 generate an exception */
4441 if (dc
->tf
|| dc
->singlestep_enabled
) {
4442 gen_op_jmp_im(pc_ptr
- dc
->cs_base
);
4446 /* if too long translation, stop generation too */
4447 if (gen_opc_ptr
>= gen_opc_end
||
4448 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
4449 gen_op_jmp_im(pc_ptr
- dc
->cs_base
);
/* terminate the intermediate-op stream */
4454 *gen_opc_ptr
= INDEX_op_end
;
4455 /* we don't forget to fill the last values */
4457 j
= gen_opc_ptr
- gen_opc_buf
;
4460 gen_opc_instr_start
[lj
++] = 0;
/* debug logging: disassemble the guest code and dump generated ops */
4465 fprintf(logfile
, "----------------\n");
4466 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4467 disas(logfile
, pc_start
, pc_ptr
- pc_start
, 0, !dc
->code32
);
4468 fprintf(logfile
, "\n");
4470 fprintf(logfile
, "OP:\n");
4471 dump_ops(gen_opc_buf
, gen_opparam_buf
);
4472 fprintf(logfile
, "\n");
4476 /* optimize flag computations */
4477 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
4481 fprintf(logfile
, "AFTER FLAGS OPT:\n");
4482 dump_ops(gen_opc_buf
, gen_opparam_buf
);
4483 fprintf(logfile
, "\n");
/* record how many guest bytes this translation block covers */
4487 tb
->size
= pc_ptr
- pc_start
;
4491 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
4493 return gen_intermediate_code_internal(env
, tb
, 0);
4496 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
4498 return gen_intermediate_code_internal(env
, tb
, 1);