/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4,
            cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* i386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
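/*
 * Illustration (editorial, not part of the original source): DEF_REGS
 * pastes its prefix and suffix around each register name, so a table
 * such as
 *
 *     static GenOpFunc *tab[CPU_NB_REGS] = {
 *         DEF_REGS(gen_op_cmovw_, _T1_T0)
 *     };
 *
 * expands to gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0, ... with
 * the R8..R15 entries only present on TARGET_X86_64 builds.
 */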
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
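/*
 * Sketch of how these offsets are used (editorial): each guest register
 * lives in a target_ulong slot of CPUState.regs[], and sub-register
 * accesses are byte-offset stores into that slot.  E.g. on a
 * little-endian host, writing AH is an 8 bit store at
 * offsetof(CPUState, regs[R_EAX]) + REG_H_OFFSET, and REG_LH_OFFSET
 * addresses the high 32 bits of a 64 bit slot so it can be zeroed
 * after a 32 bit operation, as x86_64 requires.
 */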
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
565 static inline void gen_op_ld_T0_A0(int idx
)
567 int mem_index
= (idx
>> 2) - 1;
570 tcg_gen_qemu_ld8u(cpu_T
[0], cpu_A0
, mem_index
);
573 tcg_gen_qemu_ld16u(cpu_T
[0], cpu_A0
, mem_index
);
576 tcg_gen_qemu_ld32u(cpu_T
[0], cpu_A0
, mem_index
);
580 tcg_gen_qemu_ld64(cpu_T
[0], cpu_A0
, mem_index
);
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* dummy store first, so the insn can restart after a page fault
       on the destination */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
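/*
 * Illustration (editorial): the translator instantiates these templates
 * once per string instruction, e.g.
 *
 *     GEN_REPZ(movs)   // defines gen_repz_movs(s, ot, cur_eip, next_eip)
 *     GEN_REPZ2(scas)  // defines gen_repz_scas(..., int nz)
 *
 * GEN_REPZ2 additionally re-tests ZF after each iteration via
 * gen_op_string_jnz_sub, so the same body implements both REPZ and
 * REPNZ through the 'nz' argument.
 */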
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    /* rows for OT_BYTE, OT_WORD and OT_LONG follow the same pattern as
       gen_setcc_sub below (entries at jcc_op 1..4, 6, 7); they are not
       reproduced in this excerpt */
#ifdef TARGET_X86_64
    [3] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
static GenOpFunc1 *gen_op_loop[3][4] = {
    /* 16/32 bit rows elided in this excerpt */
#ifdef TARGET_X86_64
    /* 64 bit row elided in this excerpt */
#endif
};
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
static GenOpFunc *gen_setcc_sub[4][8] = {
    {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    gen_compute_eflags_c(cpu_cc_src);
}
static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}
/* XXX: add faster immediate case */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            switch(ot) {
            case OT_BYTE:
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                break;
            case OT_WORD:
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                break;
            case OT_LONG:
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                break;
            default:
                break;
            }
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right)
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib, base, index, scale, opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0)
                    gen_op_addq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;
        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }
        switch (mod) {
        case 0:
            if (base == 5)
                s->pc += 4;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6)
                s->pc += 2;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;

    must_add_seg = s->addseg;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    } else {
        override = R_DS;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
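/*
 * Background (editorial): tcg_gen_goto_tb/tcg_gen_exit_tb implement
 * direct block chaining.  Each TB has two patchable jump slots
 * (tb_num 0 and 1); exit_tb returns (tb | tb_num) to the execution
 * loop, which patches that slot so the next execution jumps straight
 * to the target TB without going through the dispatcher.
 */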
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

        /* some jumps are easy to compute */
        case CC_OP_ADDB:
        /* ... the long list of further CC_OP_* case labels is elided in
           this excerpt ... */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    /* ... the long list of further CC_OP_* case labels is elided in
       this excerpt ... */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
            break;
    }
    return 0;
}

static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;

    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;

    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
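/*
 * Illustration (editorial): these macros build the per-variant helper
 * lists, e.g.
 *
 *     MMX_OP2(psubb) -> { helper_psubb_mmx, helper_psubb_xmm }
 *     SSE_FOP(add)   -> { helper_addps, helper_addpd,
 *                         helper_addss, helper_addsd, }
 *
 * For MMX_OP2, column 0 is the MMX helper and column 1 the SSE (xmm)
 * one; the four columns of sse_op_table1 below correspond to the
 * no-prefix, 0x66, F3 and F2 encodings of each opcode.
 */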
2733 static void *sse_op_table1
[256][4] = {
2734 /* 3DNow! extensions */
2735 [0x0e] = { SSE_DUMMY
}, /* femms */
2736 [0x0f] = { SSE_DUMMY
}, /* pf... */
2737 /* pure SSE operations */
2738 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2739 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2740 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2741 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2742 [0x14] = { helper_punpckldq_xmm
, helper_punpcklqdq_xmm
},
2743 [0x15] = { helper_punpckhdq_xmm
, helper_punpckhqdq_xmm
},
2744 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2745 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2747 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2748 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2749 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2750 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2751 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2752 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2753 [0x2e] = { helper_ucomiss
, helper_ucomisd
},
2754 [0x2f] = { helper_comiss
, helper_comisd
},
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
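/* Each sse_op_table1 row holds up to four variants, selected by the
   mandatory prefix: column 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2
   (this is the b1 index computed in gen_sse() below). */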
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
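/* sse_op_table2 serves the immediate shift groups (opcodes 0x71/0x72/0x73):
   the row is ((b - 1) & 3) * 8 plus the ModRM /reg field, and the column
   picks the MMX or the 0x66-prefixed XMM form (see the lookup in
   gen_sse() below). */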
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
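/* sse_op_table3 holds the scalar int<->float converters; gen_sse() indexes
   it with (s->dflag == 2) * 2 + ((b >> 8) - 2), with further offsets for
   the truncating and rounding integer forms.  The 64-bit source/result
   variants only exist on x86_64. */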
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx, /* pavgusb */
};
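/* sse_op_table5 is indexed by the trailing opcode byte of a 3DNow!
   instruction (0x0f 0x0f modrm imm8); unassigned entries stay NULL and
   are rejected as illegal opcodes in gen_sse(). */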
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;
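
    /* b1 encodes the mandatory prefix of the current instruction:
       0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */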
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        is_xmm = (b1 != 0); /* MMX form only when there is no prefix */
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
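    /* ModRM layout: bits 7:6 = mod, bits 5:3 = reg (or opcode extension),
       bits 2:0 = rm; REX.R/REX.B extend reg and rm on x86_64. */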
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
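            /* a NULL table entry means this shift/prefix combination does
               not exist and must raise #UD */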
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
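            /* (b >> 8) - 2 maps the 0xF3 (cvtsi2ss) and 0xF2 (cvtsi2sd)
               forms to columns 0 and 1; dflag == 2 selects the 64-bit
               source variants on x86_64 */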
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* just to keep the EFLAGS optimization correct */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;
    dflag = s->code32;
    s->override = -1;
    rex_w = -1;
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
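            /* decoded above: REX = 0100WRXB; W selects 64-bit operand
               size, R/X/B extend the ModRM reg, SIB index and rm/base
               register fields */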
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2;
    } else
#endif
    {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;
    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_lock);

    /* now check op code */
 reswitch:
    switch(b) {
    case 0x0f:
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        goto reswitch;

        /**************************/
        /* arith & logic */
    case 0x00 ... 0x05:
    case 0x08 ... 0x0d:
    case 0x10 ... 0x15:
    case 0x18 ... 0x1d:
    case 0x20 ... 0x25:
    case 0x28 ... 0x2d:
    case 0x30 ... 0x35:
    case 0x38 ... 0x3d:
        {
            int op, f, val;
            op = (b >> 3) & 7;
            f = (b >> 1) & 3;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            switch(f) {
            case 0: /* OP Ev, Gv */
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                mod = (modrm >> 6) & 3;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    opreg = OR_TMP0;
                } else if (op == OP_XORL && rm == reg) {
                xor_zero:
                    /* xor reg, reg optimisation */
                    gen_op_movl_T0_0();
                    s->cc_op = CC_OP_LOGICB + ot;
                    gen_op_mov_reg_T0(ot, reg);
                    gen_op_update1_cc();
                    break;
                } else {
                    opreg = rm;
                }
                gen_op_mov_TN_reg(ot, 1, reg);
                gen_op(s, op, ot, opreg);
                break;
            case 1: /* OP Gv, Ev */
                modrm = ldub_code(s->pc++);
                mod = (modrm >> 6) & 3;
                reg = ((modrm >> 3) & 7) | rex_r;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_ld_T1_A0(ot + s->mem_index);
                } else if (op == OP_XORL && rm == reg) {
                    goto xor_zero;
                } else {
                    gen_op_mov_TN_reg(ot, 1, rm);
                }
                gen_op(s, op, ot, reg);
                break;
            case 2: /* OP A, Iv */
                val = insn_get(s, ot);
                gen_op_movl_T1_im(val);
                gen_op(s, op, ot, OR_EAX);
                break;
            }
        }
        break;

    case 0x80: /* GRP1 */
    case 0x81:
    case 0x82:
    case 0x83:
        {
            int val;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (b == 0x83)
                    s->rip_offset = 1;
                else
                    s->rip_offset = insn_const_size(ot);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = rm;
            }

            switch(b) {
            default:
            case 0x80:
            case 0x81:
            case 0x82:
                val = insn_get(s, ot);
                break;
            case 0x83:
                val = (int8_t)insn_get(s, OT_BYTE);
                break;
            }
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, opreg);
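            /* note: 0x83 sign-extends its 8-bit immediate to the operand
               size, while 0x80/0x81 read a full-size immediate */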
        }
        break;

        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
        break;
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
        break;
    case 0xf6: /* GRP3 */
    case 0xf7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            break;
        case 3: /* neg */
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mulb_AL_T0();
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mulw_AX_T0();
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
                gen_op_mull_EAX_T0();
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_op_mulq_EAX_T0();
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_imulb_AL_T0();
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_imulw_AX_T0();
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
                gen_op_imull_EAX_T0();
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_op_imulq_EAX_T0();
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        case 7: /* idiv */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for far calls, the operand is 16 or 32 bit, even
                   in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_lcall:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_real,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - s->cs_base));
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_3(helper_ljmp_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;

    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movslq_RAX_EAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movswl_EAX_AX();
        else
            gen_op_movsbw_AX_AL();
        break;
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movsqo_RDX_RAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movslq_EDX_EAX();
        else
            gen_op_movswl_DX_AX();
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg(ot, 1, reg);
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        } else
#endif
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
        } else {
            gen_op_imulw_T0_T1();
        }
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        {
            int label1;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            gen_op_mov_TN_reg(ot, 1, reg);
            if (mod == 3) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_TN_reg(ot, 0, rm);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
                rm = 0; /* avoid warning */
            }
            label1 = gen_new_label();
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
            gen_extu(ot, cpu_T3);
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
            gen_op_mov_reg_T0(ot, R_EAX);
            gen_set_label(label1);
            if (mod == 3) {
                gen_op_mov_reg_T1(ot, rm);
            } else {
                gen_op_st_T1_A0(ot + s->mem_index);
            }
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
            s->cc_op = CC_OP_SUBB + ot;
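            /* the brcond above skips the accumulator update when the
               comparison succeeds, so on success the destination gets the
               source (still in T1) and on failure EAX gets the old
               destination value; the flags are those of the implicit CMP */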
        }
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
        gen_jmp_im(pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_cmpxchg8b();
        s->cc_op = CC_OP_EFLAGS;
        break;

        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
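        /* leave is mov ESP, EBP (sized by the stack segment) followed by
           pop EBP (sized by the operand size) */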
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;

        /**************************/
        /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0(ot + s->mem_index);
        else
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;

    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(ot, 0, rm);
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    gen_op_movzbl_T0_T0();
                    break;
                case OT_BYTE | 8:
                    gen_op_movsbl_T0_T0();
                    break;
                case OT_WORD:
                    gen_op_movzwl_T0_T0();
                    break;
                default:
                case OT_WORD | 8:
                    gen_op_movswl_T0_T0();
                    break;
                }
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0(ot + s->mem_index);
                } else {
                    gen_op_ldu_T0_A0(ot + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        }
        break;

    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;

    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_AL();
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;

    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_lock);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_unlock);
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;

        /************************/
        /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;

    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = rm;
        }
        gen_op_mov_TN_reg(ot, 1, reg);

        if (shift) {
            val = ldub_code(s->pc++);
            tcg_gen_movi_tl(cpu_T3, val);
        } else {
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
        }
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
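        /* for the double-width shifts, cpu_T3 carries the shift count
           (immediate or CL) into the common generator */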
        break;

        /************************/
        /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
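        /* op packs the low three opcode bits (0xd8..0xdf) with the ModRM
           reg field, giving the 6-bit FPU operation index used below */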
4819 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4821 case 0x00 ... 0x07: /* fxxxs */
4822 case 0x10 ... 0x17: /* fixxxl */
4823 case 0x20 ... 0x27: /* fxxxl */
4824 case 0x30 ... 0x37: /* fixxx */
4831 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
4832 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4833 tcg_gen_helper_0_1(helper_flds_FT0
, cpu_tmp2_i32
);
4836 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
4837 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4838 tcg_gen_helper_0_1(helper_fildl_FT0
, cpu_tmp2_i32
);
4841 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
4842 (s
->mem_index
>> 2) - 1);
4843 tcg_gen_helper_0_1(helper_fldl_FT0
, cpu_tmp1_i64
);
4847 gen_op_lds_T0_A0(OT_WORD
+ s
->mem_index
);
4848 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4849 tcg_gen_helper_0_1(helper_fildl_FT0
, cpu_tmp2_i32
);
4853 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0
[op1
]);
4855 /* fcomp needs pop */
4856 tcg_gen_helper_0_0(helper_fpop
);
4860 case 0x08: /* flds */
4861 case 0x0a: /* fsts */
4862 case 0x0b: /* fstps */
4863 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4864 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4865 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4870 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
4871 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4872 tcg_gen_helper_0_1(helper_flds_ST0
, cpu_tmp2_i32
);
4875 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
4876 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4877 tcg_gen_helper_0_1(helper_fildl_ST0
, cpu_tmp2_i32
);
4880 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
4881 (s
->mem_index
>> 2) - 1);
4882 tcg_gen_helper_0_1(helper_fldl_ST0
, cpu_tmp1_i64
);
4886 gen_op_lds_T0_A0(OT_WORD
+ s
->mem_index
);
4887 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4888 tcg_gen_helper_0_1(helper_fildl_ST0
, cpu_tmp2_i32
);
4893 /* XXX: the corresponding CPUID bit must be tested ! */
4896 tcg_gen_helper_1_0(helper_fisttl_ST0
, cpu_tmp2_i32
);
4897 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4898 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
4901 tcg_gen_helper_1_0(helper_fisttll_ST0
, cpu_tmp1_i64
);
4902 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
4903 (s
->mem_index
>> 2) - 1);
4907 tcg_gen_helper_1_0(helper_fistt_ST0
, cpu_tmp2_i32
);
4908 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4909 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
4912 tcg_gen_helper_0_0(helper_fpop
);
4917 tcg_gen_helper_1_0(helper_fsts_ST0
, cpu_tmp2_i32
);
4918 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4919 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
4922 tcg_gen_helper_1_0(helper_fistl_ST0
, cpu_tmp2_i32
);
4923 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4924 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
4927 tcg_gen_helper_1_0(helper_fstl_ST0
, cpu_tmp1_i64
);
4928 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
4929 (s
->mem_index
>> 2) - 1);
4933 tcg_gen_helper_1_0(helper_fist_ST0
, cpu_tmp2_i32
);
4934 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4935 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
4939 tcg_gen_helper_0_0(helper_fpop
);
4943 case 0x0c: /* fldenv mem */
4944 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4945 gen_op_set_cc_op(s
->cc_op
);
4946 gen_jmp_im(pc_start
- s
->cs_base
);
4947 tcg_gen_helper_0_2(helper_fldenv
,
4948 cpu_A0
, tcg_const_i32(s
->dflag
));
4950 case 0x0d: /* fldcw mem */
4951 gen_op_ld_T0_A0(OT_WORD
+ s
->mem_index
);
4952 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4953 tcg_gen_helper_0_1(helper_fldcw
, cpu_tmp2_i32
);
4955 case 0x0e: /* fnstenv mem */
4956 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4957 gen_op_set_cc_op(s
->cc_op
);
4958 gen_jmp_im(pc_start
- s
->cs_base
);
4959 tcg_gen_helper_0_2(helper_fstenv
,
4960 cpu_A0
, tcg_const_i32(s
->dflag
));
4962 case 0x0f: /* fnstcw mem */
4963 tcg_gen_helper_1_0(helper_fnstcw
, cpu_tmp2_i32
);
4964 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4965 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
4967 case 0x1d: /* fldt mem */
4968 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4969 gen_op_set_cc_op(s
->cc_op
);
4970 gen_jmp_im(pc_start
- s
->cs_base
);
4971 tcg_gen_helper_0_1(helper_fldt_ST0
, cpu_A0
);
4973 case 0x1f: /* fstpt mem */
4974 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4975 gen_op_set_cc_op(s
->cc_op
);
4976 gen_jmp_im(pc_start
- s
->cs_base
);
4977 tcg_gen_helper_0_1(helper_fstt_ST0
, cpu_A0
);
4978 tcg_gen_helper_0_0(helper_fpop
);
4980 case 0x2c: /* frstor mem */
4981 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4982 gen_op_set_cc_op(s
->cc_op
);
4983 gen_jmp_im(pc_start
- s
->cs_base
);
4984 tcg_gen_helper_0_2(helper_frstor
,
4985 cpu_A0
, tcg_const_i32(s
->dflag
));
4987 case 0x2e: /* fnsave mem */
4988 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4989 gen_op_set_cc_op(s
->cc_op
);
4990 gen_jmp_im(pc_start
- s
->cs_base
);
4991 tcg_gen_helper_0_2(helper_fsave
,
4992 cpu_A0
, tcg_const_i32(s
->dflag
));
4994 case 0x2f: /* fnstsw mem */
4995 tcg_gen_helper_1_0(helper_fnstsw
, cpu_tmp2_i32
);
4996 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
4997 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
4999 case 0x3c: /* fbld */
5000 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5001 gen_op_set_cc_op(s
->cc_op
);
5002 gen_jmp_im(pc_start
- s
->cs_base
);
5003 tcg_gen_helper_0_1(helper_fbld_ST0
, cpu_A0
);
5005 case 0x3e: /* fbstp */
5006 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5007 gen_op_set_cc_op(s
->cc_op
);
5008 gen_jmp_im(pc_start
- s
->cs_base
);
5009 tcg_gen_helper_0_1(helper_fbst_ST0
, cpu_A0
);
5010 tcg_gen_helper_0_0(helper_fpop
);
5012 case 0x3d: /* fildll */
5013 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
5014 (s
->mem_index
>> 2) - 1);
5015 tcg_gen_helper_0_1(helper_fildll_ST0
, cpu_tmp1_i64
);
5017 case 0x3f: /* fistpll */
5018 tcg_gen_helper_1_0(helper_fistll_ST0
, cpu_tmp1_i64
);
5019 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
5020 (s
->mem_index
>> 2) - 1);
5021 tcg_gen_helper_0_0(helper_fpop
);
5027 /* register float ops */
5031 case 0x08: /* fld sti */
5032 tcg_gen_helper_0_0(helper_fpush
);
5033 tcg_gen_helper_0_1(helper_fmov_ST0_STN
, tcg_const_i32((opreg
+ 1) & 7));
5035 case 0x09: /* fxchg sti */
5036 case 0x29: /* fxchg4 sti, undocumented op */
5037 case 0x39: /* fxchg7 sti, undocumented op */
5038 tcg_gen_helper_0_1(helper_fxchg_ST0_STN
, tcg_const_i32(opreg
));
5040 case 0x0a: /* grp d9/2 */
5043 /* check exceptions (FreeBSD FPU probe) */
5044 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5045 gen_op_set_cc_op(s
->cc_op
);
5046 gen_jmp_im(pc_start
- s
->cs_base
);
5047 tcg_gen_helper_0_0(helper_fwait
);
5053 case 0x0c: /* grp d9/4 */
5056 tcg_gen_helper_0_0(helper_fchs_ST0
);
5059 tcg_gen_helper_0_0(helper_fabs_ST0
);
5062 tcg_gen_helper_0_0(helper_fldz_FT0
);
5063 tcg_gen_helper_0_0(helper_fcom_ST0_FT0
);
5066 tcg_gen_helper_0_0(helper_fxam_ST0
);
5072 case 0x0d: /* grp d9/5 */
5076 tcg_gen_helper_0_0(helper_fpush
);
5077 tcg_gen_helper_0_0(helper_fld1_ST0
);
5080 tcg_gen_helper_0_0(helper_fpush
);
5081 tcg_gen_helper_0_0(helper_fldl2t_ST0
);
5084 tcg_gen_helper_0_0(helper_fpush
);
5085 tcg_gen_helper_0_0(helper_fldl2e_ST0
);
5088 tcg_gen_helper_0_0(helper_fpush
);
5089 tcg_gen_helper_0_0(helper_fldpi_ST0
);
5092 tcg_gen_helper_0_0(helper_fpush
);
5093 tcg_gen_helper_0_0(helper_fldlg2_ST0
);
5096 tcg_gen_helper_0_0(helper_fpush
);
5097 tcg_gen_helper_0_0(helper_fldln2_ST0
);
5100 tcg_gen_helper_0_0(helper_fpush
);
5101 tcg_gen_helper_0_0(helper_fldz_ST0
);
5108 case 0x0e: /* grp d9/6 */
5111 tcg_gen_helper_0_0(helper_f2xm1
);
5114 tcg_gen_helper_0_0(helper_fyl2x
);
5117 tcg_gen_helper_0_0(helper_fptan
);
5119 case 3: /* fpatan */
5120 tcg_gen_helper_0_0(helper_fpatan
);
5122 case 4: /* fxtract */
5123 tcg_gen_helper_0_0(helper_fxtract
);
5125 case 5: /* fprem1 */
5126 tcg_gen_helper_0_0(helper_fprem1
);
5128 case 6: /* fdecstp */
5129 tcg_gen_helper_0_0(helper_fdecstp
);
5132 case 7: /* fincstp */
5133 tcg_gen_helper_0_0(helper_fincstp
);
5137 case 0x0f: /* grp d9/7 */
5140 tcg_gen_helper_0_0(helper_fprem
);
5142 case 1: /* fyl2xp1 */
5143 tcg_gen_helper_0_0(helper_fyl2xp1
);
5146 tcg_gen_helper_0_0(helper_fsqrt
);
5148 case 3: /* fsincos */
5149 tcg_gen_helper_0_0(helper_fsincos
);
5151 case 5: /* fscale */
5152 tcg_gen_helper_0_0(helper_fscale
);
5154 case 4: /* frndint */
5155 tcg_gen_helper_0_0(helper_frndint
);
5158 tcg_gen_helper_0_0(helper_fsin
);
5162 tcg_gen_helper_0_0(helper_fcos
);
5166 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5167 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5168 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5174 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0
[op1
], tcg_const_i32(opreg
));
5176 tcg_gen_helper_0_0(helper_fpop
);
5178 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5179 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0
[op1
]);
5183 case 0x02: /* fcom */
5184 case 0x22: /* fcom2, undocumented op */
5185 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5186 tcg_gen_helper_0_0(helper_fcom_ST0_FT0
);
5188 case 0x03: /* fcomp */
5189 case 0x23: /* fcomp3, undocumented op */
5190 case 0x32: /* fcomp5, undocumented op */
5191 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5192 tcg_gen_helper_0_0(helper_fcom_ST0_FT0
);
5193 tcg_gen_helper_0_0(helper_fpop
);
5195 case 0x15: /* da/5 */
5197 case 1: /* fucompp */
5198 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(1));
5199 tcg_gen_helper_0_0(helper_fucom_ST0_FT0
);
5200 tcg_gen_helper_0_0(helper_fpop
);
5201 tcg_gen_helper_0_0(helper_fpop
);
5209 case 0: /* feni (287 only, just do nop here) */
5211 case 1: /* fdisi (287 only, just do nop here) */
5214 tcg_gen_helper_0_0(helper_fclex
);
5216 case 3: /* fninit */
5217 tcg_gen_helper_0_0(helper_fninit
);
5219 case 4: /* fsetpm (287 only, just do nop here) */
5225 case 0x1d: /* fucomi */
5226 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5227 gen_op_set_cc_op(s
->cc_op
);
5228 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5229 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0
);
5230 gen_op_fcomi_dummy();
5231 s
->cc_op
= CC_OP_EFLAGS
;
5233 case 0x1e: /* fcomi */
5234 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5235 gen_op_set_cc_op(s
->cc_op
);
5236 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5237 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0
);
5238 gen_op_fcomi_dummy();
5239 s
->cc_op
= CC_OP_EFLAGS
;
5241 case 0x28: /* ffree sti */
5242 tcg_gen_helper_0_1(helper_ffree_STN
, tcg_const_i32(opreg
));
5244 case 0x2a: /* fst sti */
5245 tcg_gen_helper_0_1(helper_fmov_STN_ST0
, tcg_const_i32(opreg
));
5247 case 0x2b: /* fstp sti */
5248 case 0x0b: /* fstp1 sti, undocumented op */
5249 case 0x3a: /* fstp8 sti, undocumented op */
5250 case 0x3b: /* fstp9 sti, undocumented op */
5251 tcg_gen_helper_0_1(helper_fmov_STN_ST0
, tcg_const_i32(opreg
));
5252 tcg_gen_helper_0_0(helper_fpop
);
5254 case 0x2c: /* fucom st(i) */
5255 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5256 tcg_gen_helper_0_0(helper_fucom_ST0_FT0
);
5258 case 0x2d: /* fucomp st(i) */
5259 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5260 tcg_gen_helper_0_0(helper_fucom_ST0_FT0
);
5261 tcg_gen_helper_0_0(helper_fpop
);
5263 case 0x33: /* de/3 */
5265 case 1: /* fcompp */
5266 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(1));
5267 tcg_gen_helper_0_0(helper_fcom_ST0_FT0
);
5268 tcg_gen_helper_0_0(helper_fpop
);
5269 tcg_gen_helper_0_0(helper_fpop
);
5275 case 0x38: /* ffreep sti, undocumented op */
5276 tcg_gen_helper_0_1(helper_ffree_STN
, tcg_const_i32(opreg
));
5277 tcg_gen_helper_0_0(helper_fpop
);
5279 case 0x3c: /* df/4 */
5282 tcg_gen_helper_1_0(helper_fnstsw
, cpu_tmp2_i32
);
5283 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5284 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
5290 case 0x3d: /* fucomip */
5291 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5292 gen_op_set_cc_op(s
->cc_op
);
5293 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5294 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0
);
5295 tcg_gen_helper_0_0(helper_fpop
);
5296 gen_op_fcomi_dummy();
5297 s
->cc_op
= CC_OP_EFLAGS
;
5299 case 0x3e: /* fcomip */
5300 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5301 gen_op_set_cc_op(s
->cc_op
);
5302 tcg_gen_helper_0_1(helper_fmov_FT0_STN
, tcg_const_i32(opreg
));
5303 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0
);
5304 tcg_gen_helper_0_0(helper_fpop
);
5305 gen_op_fcomi_dummy();
5306 s
->cc_op
= CC_OP_EFLAGS
;
5308 case 0x10 ... 0x13: /* fcmovxx */
5312 const static uint8_t fcmov_cc
[8] = {
5318 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
5320 l1
= gen_new_label();
5321 tcg_gen_brcond_tl(TCG_COND_EQ
, cpu_T
[0], tcg_const_tl(0), l1
);
5322 tcg_gen_helper_0_1(helper_fmov_ST0_STN
, tcg_const_i32(opreg
));
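                    /* gen_setcc() left the tested condition in cpu_T[0];
                       the brcond above skips the fmov when it is zero */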
                    gen_set_label(l1);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;

        /************************/
        /* string ops */
    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;
    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;

        /************************/
        /* port I/O */
    case 0xe4:
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xe6:
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        break;
    case 0xec:
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xee:
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        break;

        /************************/
        /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
            break;
        if (!s->pe) {
            /* real mode */
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
= ldub_code(s
->pc
++);
5447 gen_op_movl_T0_im(val
);
5448 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
5449 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
));
5450 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5451 tcg_gen_helper_1_1(helper_in_func
[ot
], cpu_T
[1], cpu_tmp2_i32
);
5452 gen_op_mov_reg_T1(ot
, R_EAX
);
5459 ot
= dflag
? OT_LONG
: OT_WORD
;
5460 val
= ldub_code(s
->pc
++);
5461 gen_op_movl_T0_im(val
);
5462 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
5463 svm_is_rep(prefixes
));
5464 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5466 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5467 tcg_gen_andi_i32(cpu_tmp2_i32
, cpu_tmp2_i32
, 0xffff);
5468 tcg_gen_trunc_tl_i32(cpu_tmp3_i32
, cpu_T
[1]);
5469 tcg_gen_helper_0_2(helper_out_func
[ot
], cpu_tmp2_i32
, cpu_tmp3_i32
);
5476 ot
= dflag
? OT_LONG
: OT_WORD
;
5477 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5478 gen_op_andl_T0_ffff();
5479 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
5480 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
));
5481 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5482 tcg_gen_helper_1_1(helper_in_func
[ot
], cpu_T
[1], cpu_tmp2_i32
);
5483 gen_op_mov_reg_T1(ot
, R_EAX
);
5490 ot
= dflag
? OT_LONG
: OT_WORD
;
5491 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5492 gen_op_andl_T0_ffff();
5493 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
5494 svm_is_rep(prefixes
));
5495 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5497 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5498 tcg_gen_andi_i32(cpu_tmp2_i32
, cpu_tmp2_i32
, 0xffff);
5499 tcg_gen_trunc_tl_i32(cpu_tmp3_i32
, cpu_T
[1]);
5500 tcg_gen_helper_0_2(helper_out_func
[ot
], cpu_tmp2_i32
, cpu_tmp3_i32
);
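    /* All I/O forms funnel through gen_check_io(), which performs the TSS
       I/O-permission check when needed and also encodes the SVM IOIO
       intercept information: SVM_IOIO_TYPE_MASK distinguishes IN from
       OUT and svm_is_rep() records a REP prefix; the "| 4" on the
       ins/outs cases appears to be the string-operation bit of the
       intercept word. */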
        /************************/
        /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
            break;
        if (!s->pe) {
            /* real mode */
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
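    /* Each return path above ends with gen_eob(): by that point the new
       EIP has already been stored (gen_op_jmp_T0() or the helper), so the
       translation block simply terminates and the execution loop looks up
       the next block from the updated EIP. */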
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;
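    /* For direct jumps and calls the target is known at translate time,
       so gen_jmp()/gen_jcc() can use direct block chaining when jmp_opt
       allows it; the "tval &= 0xffff" masking reproduces the EIP
       truncation of 16-bit code segments. */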
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_setcc(s, b);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(ot + s->mem_index);
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 1, rm);
        }
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
        break;
        /************************/
        /* flags */
    case 0x9c: /* pushf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_movl_T0_eflags();
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            if (s->cpl == 0) {
                if (s->dflag) {
                    gen_op_movl_eflags_T0_cpl0();
                } else {
                    gen_op_movw_eflags_T0_cpl0();
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0_io();
                    } else {
                        gen_op_movw_eflags_T0_io();
                    }
                } else {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0();
                    } else {
                        gen_op_movw_eflags_T0();
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movb_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movl_T0_eflags();
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_cmc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_clc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_stc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
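    /* cld/std store 1 or -1 directly into env df rather than a flag bit,
       so the string micro-ops can derive the +/- increment for ESI/EDI
       straight from df without testing the direction flag each time. */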
        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            gen_op_update_bt_cc();
        }
        break;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            gen_op_update_bt_cc();
        }
        break;
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        /* NOTE: in order to handle the 0 case, we must load the
           result. It could be optimized with a generated jump */
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
        gen_op_mov_reg_T1(ot, reg);
        s->cc_op = CC_OP_LOGICB + ot;
        break;
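    /* For bt/bts/btr/btc with a register bit index (do_btx above),
       gen_op_add_bit_A0_T1 folds the bit offset into A0 before the load,
       since the architectural bit index may address memory beyond the
       modrm-computed effective address. */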
        /************************/
        /* bcd */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_daa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_das();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_aaa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_aas();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            gen_op_aam(val);
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        gen_op_aad(val);
        s->cc_op = CC_OP_LOGICB;
        break;
        /************************/
        /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
        }
        break;
    case 0xcc: /* int3 */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_into(s->pc - pc_start);
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
            break;
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                tcg_gen_helper_0_0(helper_sti);
                /* interrupts are taken into account only starting with
                   the first insn after sti */
                /* If several instructions disable interrupts, only the
                   _first_ one does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        if (ot == OT_WORD)
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
        else
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else {
            TCGv tmp0;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        {
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#endif
        break;
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_salc();
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        /* FALL THRU */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            b &= 3;
            if (b == 3) {
                gen_op_jz_ecx[s->aflag](l1);
            } else {
                gen_op_dec_ECX[s->aflag]();
                if (b <= 1)
                    gen_op_mov_T0_cc();
                gen_op_loop[s->aflag][b](l1);
            }

            gen_jmp_im(next_eip);
            gen_op_jmp_label(l2);
            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
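    /* The loop/jecxz cases build a two-label diamond: the condition ops
       branch to l1 when the loop is taken, the fall-through path stores
       next_eip and jumps over it to l2, and a single gen_eob() then ends
       the block for both outcomes. */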
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            int retval = 0;
            if (b & 2) {
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
                tcg_gen_helper_0_0(helper_rdmsr);
            } else {
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
                tcg_gen_helper_0_0(helper_wrmsr);
            }
            if (retval)
                gen_eob(s);
        }
        break;
    case 0x131: /* rdtsc */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
            break;
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdtsc);
        break;
    case 0x133: /* rdpmc */
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdpmc);
        break;
    case 0x134: /* sysenter */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysenter);
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysexit);
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
            break;
        tcg_gen_helper_0_0(helper_cpuid);
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
                break;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            tcg_gen_helper_0_0(helper_hlt);
            s->is_jmp = 3;
        }
        break;
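    /* The privileged instructions above follow a common shape: the CPL
       check comes first and raises #GP(0) through gen_exception() instead
       of emitting the operation; the SVM intercept check is only reached
       once the privilege test has passed. */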
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                gen_op_verr();
            else
                gen_op_verw();
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
                break;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
        case 1:
            if (mod == 3) {
                switch (rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
                        break;
                    gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EBX);
                        gen_op_addq_A0_AL();
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EBX);
                        gen_op_addl_A0_AL();
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
                        break;
                    gen_jmp_im(s->pc - s->cs_base);
                    tcg_gen_helper_0_0(helper_mwait);
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
                    break;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                switch(rm) {
                case 0: /* VMRUN */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
                        break;
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(s->pc - s->cs_base);
                    tcg_gen_helper_0_0(helper_vmrun);
                    s->cc_op = CC_OP_EFLAGS;
                    gen_eob(s);
                    break;
                case 1: /* VMMCALL */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
                        break;
                    /* FIXME: cause #UD if hflags & SVM */
                    tcg_gen_helper_0_0(helper_vmmcall);
                    break;
                case 2: /* VMLOAD */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
                        break;
                    tcg_gen_helper_0_0(helper_vmload);
                    break;
                case 3: /* VMSAVE */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
                        break;
                    tcg_gen_helper_0_0(helper_vmsave);
                    break;
                case 4: /* STGI */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
                        break;
                    tcg_gen_helper_0_0(helper_stgi);
                    break;
                case 5: /* CLGI */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
                        break;
                    tcg_gen_helper_0_0(helper_clgi);
                    break;
                case 6: /* SKINIT */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
                        break;
                    tcg_gen_helper_0_0(helper_skinit);
                    break;
                case 7: /* INVLPGA */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
                        break;
                    tcg_gen_helper_0_0(helper_invlpga);
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start,
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
                    break;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
                } else {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                        break;
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
                break;
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    gen_op_movslq_T0_T0();
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_TN_reg(ot, 0, rm);
            }
            gen_op_mov_TN_reg(ot, 1, reg);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS;
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_arpl_update();
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetchnt0 */
        case 2: /* prefetchnt0 */
        case 3: /* prefetchnt0 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (b & 2) {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                    gen_op_mov_TN_reg(ot, 0, rm);
                    tcg_gen_helper_0_2(helper_movl_crN_T0,
                                       tcg_const_i32(reg), cpu_T[0]);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                    if (reg == 8)
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
                    else
#endif
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}
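/* optimize_flags_init() runs once at startup and registers the TCG
   globals backing the translator's working registers; when the guest
   word size exceeds the host's (TARGET_LONG_BITS > HOST_LONG_BITS),
   T0/T1/A0 are mapped to memory slots in CPUState instead of fixed
   host registers. */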
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if the translation grows too long, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
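    /* In search_pc mode, the loop above recorded the guest EIP and the
       pending cc_op for every generated op in gen_opc_pc[] and
       gen_opc_cc_op[]; gen_pc_load() below walks these tables to recover
       precise guest state from a host PC after a fault. */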
    *gen_opc_ptr = INDEX_op_end;
    /* make sure the last values are filled in */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}
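/* Restoring cc_op here matters because the condition codes are computed
   lazily: if the interrupted block still had a concrete cc_op pending,
   it is written back to env so that a later flag evaluation uses the
   right formula. */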