/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
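
/* Note (summary, not in the original source): by the usual TCG convention,
   the "global" handles above (cpu_env, cpu_A0, the cpu_cc_* values and the
   cpu_T[] scratch registers) are created once at translator initialization,
   while the cpu_tmp* locals are re-created for each translation block. */
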
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

/* I386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
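
/* Illustrative note (assumption, not from the original file): cpu_T[0] and
   cpu_T[1] act as the two implicit operand registers of the old micro-op
   interface, and cpu_A0 carries the effective address. A hypothetical
   two-operand sequence would look like:

       gen_op_movl_T0_im(0x1234);   // T0 = immediate
       gen_op_movl_T1_im(0x10);     // T1 = immediate
       gen_op_addl_T0_T1();         // T0 = T0 + T1 (defined below)

   with the result written back through one of the gen_op_mov_reg_* helpers. */
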
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
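
/* Note (derived from the code above, not an original comment): 'idx' packs
   two values — the low 2 bits select the operand size (OT_BYTE..OT_QUAD)
   and the upper bits carry the memory index biased by +1, hence
   (idx >> 2) - 1. This lets callers pass "ot + s->mem_index" as a single
   argument to the load/store helpers. */
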
/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    target_ulong next_eip;

    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
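
/* Illustrative sketch (not part of the original source): a plain "movsb"
   thus expands to roughly this op sequence:

       gen_string_movl_A0_ESI(s);               // A0 = seg base + ESI
       gen_op_ld_T0_A0(OT_BYTE + s->mem_index); // T0 = [A0]
       gen_string_movl_A0_EDI(s);               // A0 = ES base + EDI
       gen_op_st_T0_A0(OT_BYTE + s->mem_index); // [A0] = T0
       gen_op_movl_T0_Dshift(OT_BYTE);          // T0 = +1 or -1 from DF
       gen_op_add_reg_T0(s->aflag, R_ESI);      // advance ESI
       gen_op_add_reg_T0(s->aflag, R_EDI);      // advance EDI
*/
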
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
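
/* Note on the lazy flag scheme (summary, not an original comment): rather
   than computing EFLAGS after every instruction, the translator records the
   kind of the last flag-setting operation in cpu_cc_op and its operands in
   cpu_cc_src/cpu_cc_dst; flags are only materialized on demand by the
   gen_compute_eflags*() helpers below. */
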
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
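
/* Note (assumption inferred from the code above): cc_table is indexed by
   cc_op and each entry holds the compute_all/compute_c function pointers;
   the shift amount (3 on 32-bit hosts, 4 on 64-bit hosts) is
   log2(sizeof(CCTable)), so the generated code loads the right pointer and
   calls it indirectly via tcg_gen_call. */
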
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
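
/* Illustrative expansion (not in the original): GEN_REPZ(movs) defines
   gen_repz_movs(), which the decoder calls for "rep movs". The generated
   code tests ECX, performs one gen_movs() step, decrements ECX and jumps
   back to cur_eip, so each iteration is a separate trip through translated
   code rather than an inline host loop. */
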
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
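
/* Illustrative usage (hypothetical, the actual dispatch lives in the
   decoder outside this excerpt): for a register-form "addl %ebx, %eax"
   the caller would do roughly

       gen_op_mov_TN_reg(OT_LONG, 1, R_EBX);  // T1 = EBX (source)
       gen_op(s, OP_ADDL, OT_LONG, R_EAX);    // loads T0 from EAX, adds,
                                              // writes back, sets lazy cc

   while a memory destination passes d == OR_TMP0 with the address in A0. */
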
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
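
/* Note (not an original comment): tcg_gen_lshift() is a signed-count shift
   used by the rotate flag computation below — a positive arg2 shifts left
   and a negative arg2 shifts right by -arg2, so an expression such as
   "11 - (data_bits - 1)" can move a bit either direction into the CC_O
   position depending on the operand size. */
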
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}

static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
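
/* Illustrative example (hypothetical encoding, not from the original): for
   the 32-bit form "mov 0x10(%eax,%ebx,4), %ecx" the modrm byte gives mod=1
   and rm=4 (a SIB byte follows), so gen_lea_modrm() emits roughly

       gen_op_movl_A0_reg(R_EAX);          // base register
       gen_op_addl_A0_im(0x10);            // disp8
       gen_op_addl_A0_reg_sN(2, R_EBX);    // index << scale

   plus gen_op_addl_A0_seg() when a non-zero segment base must be added. */
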
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}

/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}

static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}

static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
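
/* Note on direct block chaining (summary, not original text): when the
   target lies in the same guest page as the current TB, tcg_gen_goto_tb()
   emits a patchable direct jump, and tcg_gen_exit_tb((long)tb + tb_num)
   returns the TB pointer tagged with the jump slot so the execution loop
   can patch the TB to branch straight to its successor on later runs. */
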
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}

static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worthwhile */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}

static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}

static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State,segs[seg_reg].base));
}

/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}

static inline void
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));
}

static inline void
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}

static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
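
/* Illustrative sketch (not from the original): a 32-bit "pushl %eax" with
   flat segments (ss32=1, addseg=0) therefore becomes roughly

       gen_op_movl_A0_reg(R_ESP);
       gen_op_addl_A0_im(-4);
       gen_op_st_T0_A0(OT_LONG + s->mem_index);   // T0 holds EAX
       gen_op_mov_reg_A0(1, R_ESP);               // commit new ESP

   i.e. ESP is only written back after the store has succeeded, which keeps
   the push restartable after a page fault. */
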
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T1_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}

static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
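
/* Note (summary, not original text): gen_eob() is the common tail for any
   instruction that may change state visible to the execution loop — it
   flushes the lazy cc_op, clears the MOV SS/STI interrupt-inhibit flag,
   honours single stepping, and exits with tcg_gen_exit_tb(0) so no direct
   chaining to a successor block takes place. */
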
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}

static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }

static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};

static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
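
/* sse_op_table2 serves the immediate-count shift group (opcodes
   0x71/0x72/0x73): the row is ((b - 1) & 3) * 8, selecting the w/d/q
   element size, plus the ModRM /r field selecting the operation
   (/2 = logical right, /4 = arithmetic right, /6 = logical left);
   the column is the same prefix index b1 as for sse_op_table1. */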
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
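
/* sse_op_table3 holds the scalar int<->float conversions in rows of
   four entries: the index is (s->dflag == 2) * 2 + ((b >> 8) - 2),
   i.e. the F3/F2 prefix (ss vs sd) combined with the 32/64-bit integer
   operand size, with constant offsets added at the call sites in
   gen_sse to pick the cvtsi2/cvtt/cvt row. */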
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
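
/* 3DNow! encodes the actual operation in a trailing opcode byte
   (0F 0F /r ib): gen_sse fetches that byte with ldub_code and uses it
   to index sse_op_table5 directly, so unassigned entries stay NULL and
   are rejected as illegal opcodes. */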
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }
    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
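        /* ModRM layout reminder: bits 7:6 = mod, 5:3 = reg/opcode
           extension, 2:0 = rm.  mod == 3 selects a register operand;
           e.g. ModRM 0xC1 is mod=3, reg=0, rm=1, with REX.B/REX.R
           possibly extending rm/reg to 8..15 in long mode. */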
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
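        /* Per SSE semantics, the memory-to-register form of movss below
           writes the low 32 bits and clears bits 127:32 of the
           destination, while the register-to-register form only
           replaces the low element. */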
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
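        /* Example decode for the group above: 66 0F 71 /2 ib is
           psrlw xmm, imm8 -> b = 0x71, b1 = 1, ModRM /r = 2, so the
           lookup picks helper_psrlw_xmm from sse_op_table2; the
           immediate count is first spilled to the xmm_t0/mmx_t0
           scratch operand in CPUX86State. */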
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    if (unlikely(loglevel & CPU_LOG_TB_OP))
        tcg_gen_debug_insn_start(pc_start);
    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;
    dflag = s->code32;
    s->override = -1;
    rex_w = -1;
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2;
    } else
#endif
    {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;

    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_lock);
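
    /* Worked example: in 64-bit mode, 48 01 D8 is add rax, rbx.
       0x48 is a REX prefix with REX.W set, so rex_w = 1 and dflag
       becomes 2 (64-bit operand size); 0x01 is then decoded below with
       ModRM 0xD8 (mod = 3, reg = 3 = rbx, rm = 0 = rax). */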
    /* now check op code */
 reswitch:
    switch(b) {
    case 0x0f:
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        goto reswitch;

        /**************************/
        /* arith & logic */
    case 0x00 ... 0x05:
    case 0x08 ... 0x0d:
    case 0x10 ... 0x15:
    case 0x18 ... 0x1d:
    case 0x20 ... 0x25:
    case 0x28 ... 0x2d:
    case 0x30 ... 0x35:
    case 0x38 ... 0x3d:
        {
            int op, f, val;
            op = (b >> 3) & 7;
            f = (b >> 1) & 3;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            switch(f) {
            case 0: /* OP Ev, Gv */
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                mod = (modrm >> 6) & 3;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    opreg = OR_TMP0;
                } else if (op == OP_XORL && rm == reg) {
                xor_zero:
                    /* xor reg, reg optimisation */
                    gen_op_movl_T0_0();
                    s->cc_op = CC_OP_LOGICB + ot;
                    gen_op_mov_reg_T0(ot, reg);
                    gen_op_update1_cc();
                    break;
                } else {
                    opreg = rm;
                }
                gen_op_mov_TN_reg(ot, 1, reg);
                gen_op(s, op, ot, opreg);
                break;
            case 1: /* OP Gv, Ev */
                modrm = ldub_code(s->pc++);
                mod = (modrm >> 6) & 3;
                reg = ((modrm >> 3) & 7) | rex_r;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_ld_T1_A0(ot + s->mem_index);
                } else if (op == OP_XORL && rm == reg) {
                    goto xor_zero;
                } else {
                    gen_op_mov_TN_reg(ot, 1, rm);
                }
                gen_op(s, op, ot, reg);
                break;
            case 2: /* OP A, Iv */
                val = insn_get(s, ot);
                gen_op_movl_T1_im(val);
                gen_op(s, op, ot, OR_EAX);
                break;
            }
        }
        break;
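
        /* Condition codes are evaluated lazily: gen_op() leaves its
           operands in cpu_cc_src/cpu_cc_dst and records which operation
           last set the flags in s->cc_op (e.g. CC_OP_LOGICB + ot above),
           so EFLAGS is only materialized when a later instruction
           actually needs it. */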
    case 0x80: /* GRP1 */
    case 0x81:
    case 0x82:
    case 0x83:
        {
            int val;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (b == 0x83)
                    s->rip_offset = 1;
                else
                    s->rip_offset = insn_const_size(ot);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = rm;
            }

            switch(b) {
            default:
            case 0x80:
            case 0x81:
            case 0x82:
                val = insn_get(s, ot);
                break;
            case 0x83:
                val = (int8_t)insn_get(s, OT_BYTE);
                break;
            }
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, opreg);
        }
        break;
        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
        break;
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
        break;
    case 0xf6: /* GRP3 */
    case 0xf7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            break;
        case 3: /* neg */
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
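        /* mul widens: e.g. mul r/m16 computes DX:AX = AX * r/m16, so the
           code above writes the low half to AX/EAX and the high half to
           DX/EDX, keeping the high half in cpu_cc_src so CF/OF can later
           be derived from "high part != 0". */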
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        case 7: /* idiv */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
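
        /* div/idiv are implemented as helper calls because they can
           raise #DE; gen_jmp_im(pc_start - s->cs_base) first rewinds
           EIP to the faulting instruction so the exception is precise. */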
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for far calls, the operand is 16 or 32 bit, even
                   in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_lcall:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_real,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - s->cs_base));
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_3(helper_ljmp_protected,
                                   cpu_tmp2_i32,
                                   cpu_T[1],
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                gen_op_movl_seg_T0_vm(R_CS);
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;
    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
        } else {
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
        }
        break;
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
        }
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg(ot, 1, reg);
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
        } else
#endif
        if (ot == OT_LONG) {
#ifdef TARGET_X86_64
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
                }
#endif
        } else {
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
            /* XXX: use 32 bit mul which could be faster */
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
        }
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        {
            int label1, label2;
            TCGv t0, t1, t2, a0;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
            a0 = tcg_temp_local_new(TCG_TYPE_TL);
            gen_op_mov_v_reg(ot, t1, reg);
            if (mod == 3) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_mov_tl(a0, cpu_A0);
                gen_op_ld_v(ot + s->mem_index, t0, a0);
                rm = 0; /* avoid warning */
            }
            label1 = gen_new_label();
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
            tcg_gen_sub_tl(t2, t2, t0);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
            if (mod == 3) {
                label2 = gen_new_label();
                gen_op_mov_reg_v(ot, R_EAX, t0);
                tcg_gen_br(label2);
                gen_set_label(label1);
                gen_op_mov_reg_v(ot, rm, t1);
                gen_set_label(label2);
            } else {
                tcg_gen_mov_tl(t1, t0);
                gen_op_mov_reg_v(ot, R_EAX, t0);
                gen_set_label(label1);
                /* always store */
                gen_op_st_v(ot + s->mem_index, t1, a0);
            }
            tcg_gen_mov_tl(cpu_cc_src, t0);
            tcg_gen_mov_tl(cpu_cc_dst, t2);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
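
        /* cmpxchg semantics: if the accumulator equals the destination,
           the destination is replaced by the source; otherwise the
           accumulator receives the destination.  For the memory form a
           store is performed on both paths above, matching the hardware
           behaviour of always generating a write cycle. */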
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
#ifdef TARGET_X86_64
        if (dflag == 2) {
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
        } else
#endif
        {
            if (!(s->cpuid_features & CPUID_CX8))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
        }
        s->cc_op = CC_OP_EFLAGS;
        break;
        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               first one takes effect */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /**************************/
        /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0(ot + s->mem_index);
        else
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               first one takes effect */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;
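
        /* gen_ldst_modrm's final argument selects the direction: 0 loads
           the Ev operand into T0, 1 stores T0 (or the named register) to
           it, which is why the mov cases above can share one helper for
           both the load and the store forms. */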
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Ew */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Ew */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(ot, 0, rm);
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_BYTE | 8:
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_WORD:
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                    break;
                default:
                case OT_WORD | 8:
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                    break;
                }
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0(ot + s->mem_index);
                } else {
                    gen_op_ldu_T0_A0(ot + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        }
        break;
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;
    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
            else
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_lock);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_unlock);
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /************************/
        /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;
    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = rm;
        }
        gen_op_mov_TN_reg(ot, 1, reg);

        if (shift) {
            val = ldub_code(s->pc++);
            tcg_gen_movi_tl(cpu_T3, val);
        } else {
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
        }
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
        break;
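
        /* shld/shrd shift a double-width value: the count (immediate or
           CL) is staged in cpu_T3 above, the second operand in T1, and
           gen_shiftd_rm_T1_T3 emits the combined shift plus the flag
           update. */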
        /************************/
        /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
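            /* For the x87 block, op packs the low 3 opcode bits with the
               ModRM reg field: op = ((b & 7) << 3) | reg.  In the memory
               cases below, op >> 4 therefore selects the operand format
               (0 = float32, 1 = int32, 2 = float64, 3 = int16) and the
               low bits the operation. */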
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    }

                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        tcg_gen_helper_0_0(helper_fpop);
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    }
                    break;
                case 1:
                    /* XXX: the corresponding CPUID bit must be tested ! */
                    switch(op >> 4) {
                    case 1:
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    switch(op >> 4) {
                    case 0:
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 1:
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    if ((op & 7) == 3)
                        tcg_gen_helper_0_0(helper_fpop);
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fldenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0d: /* fldcw mem */
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
                break;
            case 0x0e: /* fnstenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fstenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0f: /* fnstcw mem */
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
5125 case 0x1d: /* fldt mem */
5126 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5127 gen_op_set_cc_op(s
->cc_op
);
5128 gen_jmp_im(pc_start
- s
->cs_base
);
5129 tcg_gen_helper_0_1(helper_fldt_ST0
, cpu_A0
);
5131 case 0x1f: /* fstpt mem */
5132 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5133 gen_op_set_cc_op(s
->cc_op
);
5134 gen_jmp_im(pc_start
- s
->cs_base
);
5135 tcg_gen_helper_0_1(helper_fstt_ST0
, cpu_A0
);
5136 tcg_gen_helper_0_0(helper_fpop
);
5138 case 0x2c: /* frstor mem */
5139 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5140 gen_op_set_cc_op(s
->cc_op
);
5141 gen_jmp_im(pc_start
- s
->cs_base
);
5142 tcg_gen_helper_0_2(helper_frstor
,
5143 cpu_A0
, tcg_const_i32(s
->dflag
));
5145 case 0x2e: /* fnsave mem */
5146 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5147 gen_op_set_cc_op(s
->cc_op
);
5148 gen_jmp_im(pc_start
- s
->cs_base
);
5149 tcg_gen_helper_0_2(helper_fsave
,
5150 cpu_A0
, tcg_const_i32(s
->dflag
));
5152 case 0x2f: /* fnstsw mem */
5153 tcg_gen_helper_1_0(helper_fnstsw
, cpu_tmp2_i32
);
5154 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5155 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5157 case 0x3c: /* fbld */
5158 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5159 gen_op_set_cc_op(s
->cc_op
);
5160 gen_jmp_im(pc_start
- s
->cs_base
);
5161 tcg_gen_helper_0_1(helper_fbld_ST0
, cpu_A0
);
5163 case 0x3e: /* fbstp */
5164 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5165 gen_op_set_cc_op(s
->cc_op
);
5166 gen_jmp_im(pc_start
- s
->cs_base
);
5167 tcg_gen_helper_0_1(helper_fbst_ST0
, cpu_A0
);
5168 tcg_gen_helper_0_0(helper_fpop
);
5170 case 0x3d: /* fildll */
5171 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
5172 (s
->mem_index
>> 2) - 1);
5173 tcg_gen_helper_0_1(helper_fildll_ST0
, cpu_tmp1_i64
);
5175 case 0x3f: /* fistpll */
5176 tcg_gen_helper_1_0(helper_fistll_ST0
, cpu_tmp1_i64
);
5177 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
5178 (s
->mem_index
>> 2) - 1);
5179 tcg_gen_helper_0_0(helper_fpop
);
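            /* The fldenv/fnstenv/frstor/fnsave helpers above may fault,
               so the lazy condition codes are flushed and EIP is synced
               to the start of the insn first; s->dflag selects the
               16-bit vs 32-bit environment layout inside the helper. */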
            /* register float ops */
            case 0x08: /* fld sti */
                tcg_gen_helper_0_0(helper_fpush);
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    tcg_gen_helper_0_0(helper_fwait);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    tcg_gen_helper_0_0(helper_fchs_ST0);
                    break;
                case 1: /* fabs */
                    tcg_gen_helper_0_0(helper_fabs_ST0);
                    break;
                case 4: /* ftst */
                    tcg_gen_helper_0_0(helper_fldz_FT0);
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    break;
                case 5: /* fxam */
                    tcg_gen_helper_0_0(helper_fxam_ST0);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                switch(rm) {
                case 0:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fld1_ST0);
                    break;
                case 1:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldl2t_ST0);
                    break;
                case 2:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldl2e_ST0);
                    break;
                case 3:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldpi_ST0);
                    break;
                case 4:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldlg2_ST0);
                    break;
                case 5:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldln2_ST0);
                    break;
                case 6:
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldz_ST0);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    tcg_gen_helper_0_0(helper_f2xm1);
                    break;
                case 1: /* fyl2x */
                    tcg_gen_helper_0_0(helper_fyl2x);
                    break;
                case 2: /* fptan */
                    tcg_gen_helper_0_0(helper_fptan);
                    break;
                case 3: /* fpatan */
                    tcg_gen_helper_0_0(helper_fpatan);
                    break;
                case 4: /* fxtract */
                    tcg_gen_helper_0_0(helper_fxtract);
                    break;
                case 5: /* fprem1 */
                    tcg_gen_helper_0_0(helper_fprem1);
                    break;
                case 6: /* fdecstp */
                    tcg_gen_helper_0_0(helper_fdecstp);
                    break;
                case 7: /* fincstp */
                    tcg_gen_helper_0_0(helper_fincstp);
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    tcg_gen_helper_0_0(helper_fprem);
                    break;
                case 1: /* fyl2xp1 */
                    tcg_gen_helper_0_0(helper_fyl2xp1);
                    break;
                case 2: /* fsqrt */
                    tcg_gen_helper_0_0(helper_fsqrt);
                    break;
                case 3: /* fsincos */
                    tcg_gen_helper_0_0(helper_fsincos);
                    break;
                case 5: /* fscale */
                    tcg_gen_helper_0_0(helper_fscale);
                    break;
                case 4: /* frndint */
                    tcg_gen_helper_0_0(helper_frndint);
                    break;
                case 6: /* fsin */
                    tcg_gen_helper_0_0(helper_fsin);
                    break;
                default:
                case 7: /* fcos */
                    tcg_gen_helper_0_0(helper_fcos);
                    break;
                }
                break;
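            /* grp d9/5 above: each constant load is an fpush followed by
               a helper that writes the constant (1.0, log2(10), log2(e),
               pi, log10(2), ln(2), 0.0) into the new ST0. */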
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
                        if (op >= 0x30)
                            tcg_gen_helper_0_0(helper_fpop);
                    } else {
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    tcg_gen_helper_0_0(helper_fclex);
                    break;
                case 3: /* fninit */
                    tcg_gen_helper_0_0(helper_fninit);
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                break;
            case 0x2a: /* fst sti */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* fucom st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                break;
            case 0x2d: /* fucomp st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1, l1;
                    static const uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
                    l1 = gen_new_label();
                    gen_jcc1(s, s->cc_op, op1, l1);
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
                    gen_set_label(l1);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
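        /* fcmov above: fcmov_cc maps the low two opcode bits to the
           B/Z/BE/P conditions, and bit 3 of the opcode selects the
           negated form (the xor with 1); gen_jcc1 branches past the
           fmov when the condition is false, so ST0 is only overwritten
           on a taken condition. */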
    /************************/
    /* string ops */
    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;
    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;
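    /* For the repz/repnz forms above, gen_repz_* wrap a single string
       iteration in an ECX-driven loop; the start and end EIPs
       (pc_start and s->pc relative to the CS base) are passed so the
       generated code can restart the insn after an exception or a
       pending interrupt. */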
    /************************/
    /* port I/O */
    case 0xe4:
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xe6:
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        break;
    case 0xec:
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xee:
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        break;
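    /* gen_check_io validates the access against the TSS I/O permission
       bitmap (or IOPL) and emits the SVM I/O intercept check;
       SVM_IOIO_TYPE_MASK marks the access as an IN.  The actual port
       access goes through the helper_in_func/helper_out_func tables
       indexed by operand size. */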
    /************************/
    /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_stack_update(s, 2 << s->dflag);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(R_CS);
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
        if (!s->pe) {
            /* real mode */
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
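    /* lret/iret above: real mode and vm86 with IOPL 3 go through
       helper_iret_real (or the inline pop sequence for lret), while
       protected mode defers to helper_lret_protected and
       helper_iret_protected, which perform the full privilege and
       task-switch checks and may fault. */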
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        {
            int l1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            }
#ifdef TARGET_X86_64
            if (ot == OT_LONG) {
                /* XXX: specific Intel behaviour ? */
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
                gen_set_label(l1);
                tcg_gen_movi_tl(cpu_tmp0, 0);
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
            } else
#endif
            {
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
                gen_op_mov_reg_v(ot, reg, t0);
                gen_set_label(l1);
            }
            tcg_temp_free(t0);
        }
        break;
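        /* cmov above: the source operand is always loaded, then the
           writeback is skipped with gen_jcc1 on the inverted condition
           (b ^ 1).  The 64-bit OT_LONG path stores the 32-bit halves
           explicitly so the destination is zero-extended whether or not
           the move happens. */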
    /************************/
    /* flags */
    case 0x9c: /* pushf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            if (s->cpl == 0) {
                if (s->dflag) {
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
                } else {
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
                    } else {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
                    }
                } else {
                    if (s->dflag) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
                    } else {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_T[0]);
        /* Note: gen_compute_eflags() only gives the condition codes */
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
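    /* popf above: which EFLAGS bits may be written depends on the
       privilege level (CPL 0 can change IOPL, CPL <= IOPL can change
       IF, otherwise neither), and the 16-bit forms mask the update to
       the low word.  Since TF may change, the TB is ended afterwards. */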
    /************************/
    /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        goto bt_op;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_exts(ot, cpu_T[1]);
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
    bt_op:
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
        switch(op) {
        case 0:
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            break;
        case 1:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        case 2:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        default:
        case 3:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        }
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
        }
        break;
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        {
            int label1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            gen_extu(ot, cpu_T[0]);
            label1 = gen_new_label();
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            tcg_gen_mov_tl(t0, cpu_T[0]);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
            if (b & 1) {
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
            } else {
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
            }
            gen_op_mov_reg_T0(ot, reg);
            tcg_gen_movi_tl(cpu_cc_dst, 1);
            gen_set_label(label1);
            tcg_gen_discard_tl(cpu_cc_src);
            s->cc_op = CC_OP_LOGICB + ot;
            tcg_temp_free(t0);
        }
        break;
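    /* For the bit ops above, the pre-shift value whose LSB is the
       tested bit is kept (in cpu_cc_src, via cpu_tmp4 for the
       modifying forms) and cc_op is set to CC_OP_SARB + ot so that CF
       can be recomputed lazily from it. */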
    /************************/
    /* bcd */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_daa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_das);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aaa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aas);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
        break;
    /************************/
    /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
        }
        break;
    case 0xcc: /* int3 */
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                tcg_gen_helper_0_0(helper_sti);
                /* interrupts are enabled only for the first insn after sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
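    /* sti above: helper_set_inhibit_irq delays interrupt delivery by
       one instruction (matching the hardware "interrupt shadow"), and
       the TB is ended immediately afterwards so that pending irqs get a
       chance to be serviced. */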
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        if (ot == OT_WORD)
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
        else
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else {
            TCGv tmp0;
            gen_op_mov_TN_reg(OT_LONG, 0, reg);

            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        gen_op_mov_TN_reg(OT_LONG, 0, reg);
        tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_LONG, reg);
#endif
        break;
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags_c(cpu_T[0]);
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2, l3;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            l3 = gen_new_label();
            b &= 3;
            switch(b) {
            case 0: /* loopnz */
            case 1: /* loopz */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jz_ecx(s->aflag, l3);
                gen_compute_eflags(cpu_tmp0);
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                if (b == 0) {
                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
                } else {
                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
                }
                break;
            case 2: /* loop */
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jnz_ecx(s->aflag, l1);
                break;
            default:
            case 3: /* jecxz */
                gen_op_jz_ecx(s->aflag, l1);
                break;
            }

            gen_set_label(l3);
            gen_jmp_im(next_eip);
            tcg_gen_br(l2);
            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
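    /* Label usage in the loop ops above: a branch to l1 means "take
       the jump" (target tval), l3 is the exit taken when ECX reaches
       zero, and l2 joins the fall-through path at next_eip; both exits
       end the TB. */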
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            if (b & 2) {
                tcg_gen_helper_0_0(helper_rdmsr);
            } else {
                tcg_gen_helper_0_0(helper_wrmsr);
            }
        }
        break;
    case 0x131: /* rdtsc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdtsc);
        break;
    case 0x133: /* rdpmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdpmc);
        break;
    case 0x134: /* sysenter */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysenter);
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe || s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysexit);
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        tcg_gen_helper_0_0(helper_cpuid);
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            tcg_gen_helper_0_0(helper_hlt);
            s->is_jmp = 3;
        }
        break;
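    /* The system insns above (rdmsr/rdtsc/sysenter/...) sync EIP and
       flush the lazy condition codes before calling helpers that can
       fault or switch mode; cc_op is reset to CC_OP_DYNAMIC when the
       helper itself may overwrite the flag state. */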
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
            else
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
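        /* sgdt/sidt store the 16-bit limit followed by the base; with a
           16-bit operand size the base is truncated to 24 bits (the
           0xffffff mask above), matching legacy behavior. */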
        case 1:
            if (mod == 3) {
                switch(rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EAX);
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EAX);
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    gen_jmp_im(s->pc - s->cs_base);
                    tcg_gen_helper_0_0(helper_mwait);
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                switch(rm) {
                case 0: /* VMRUN */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_2(helper_vmrun,
                                           tcg_const_i32(s->aflag),
                                           tcg_const_i32(s->pc - pc_start));
                        tcg_gen_exit_tb(0);
                        s->is_jmp = 3;
                    }
                    break;
                case 1: /* VMMCALL */
                    if (!(s->flags & HF_SVME_MASK))
                        goto illegal_op;
                    tcg_gen_helper_0_0(helper_vmmcall);
                    break;
                case 2: /* VMLOAD */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_vmload,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                case 3: /* VMSAVE */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_vmsave,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                case 4: /* STGI */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_0(helper_stgi);
                    }
                    break;
                case 5: /* CLGI */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_0(helper_clgi);
                    }
                    break;
                case 6: /* SKINIT */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    tcg_gen_helper_0_0(helper_skinit);
                    break;
                case 7: /* INVLPGA */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_invlpga,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start,
                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
                } else {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
            /* nothing to do */
        }
        break;
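    /* The SVM ops above (VMRUN/VMMCALL/.../INVLPGA) require SVME and
       protected mode; all but VMMCALL and SKINIT additionally require
       CPL 0, raising #GP otherwise. */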
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            int label1;
            TCGv t0, t1, t2;

            if (!s->pe || s->vm86)
                goto illegal_op;
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
            ot = OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_v_reg(ot, t0, rm);
            }
            gen_op_mov_v_reg(ot, t1, reg);
            tcg_gen_andi_tl(cpu_tmp0, t0, 3);
            tcg_gen_andi_tl(t1, t1, 3);
            tcg_gen_movi_tl(t2, 0);
            label1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
            tcg_gen_andi_tl(t0, t0, ~3);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_movi_tl(t2, CC_Z);
            gen_set_label(label1);
            if (mod != 3) {
                gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_reg_v(ot, rm, t0);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_compute_eflags(cpu_cc_src);
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
            s->cc_op = CC_OP_EFLAGS;
            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
        }
        break;
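        /* arpl above: if the destination RPL is lower than the source
           RPL, it is raised to match and ZF is set; t2 carries the new
           CC_Z value, which is merged into the lazily computed flags. */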
    case 0x102: /* lar */
    case 0x103: /* lsl */
        {
            int label1;
            TCGv t0;
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (b == 0x102)
                tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
            else
                tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
            label1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
            gen_op_mov_reg_v(ot, reg, t0);
            gen_set_label(label1);
            s->cc_op = CC_OP_EFLAGS;
            tcg_temp_free(t0);
        }
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                if (b & 2) {
                    gen_op_mov_TN_reg(ot, 0, rm);
                    tcg_gen_helper_0_2(helper_write_crN,
                                       tcg_const_i32(reg), cpu_T[0]);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    tcg_gen_helper_1_1(helper_read_crN,
                                       cpu_T[0], tcg_const_i32(reg));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
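    /* Writes to control registers (and lmsw/clts above) end the TB
       with an explicit jump to the next insn, since they can change
       static translation state such as paging or protected mode. */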
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
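        /* fxsave/fxrstor above: illegal without CPUID.FXSR or with
           CR0.EM set, and CR0.TS raises #NM (EXCP07_PREX) first; the
           tcg_const_i32((s->dflag == 2)) argument tells the helper
           whether to use the 64-bit (REX.W) layout. */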
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
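/* Helper registration trick above: DEF_HELPER is redefined so that
   including the helper list expands to one tcg_register_helper() call
   per helper, keeping the list of helpers in a single place. */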
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }
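    /* When search_pc is set, gen_opc_pc/gen_opc_cc_op record the guest
       EIP and lazy cc state for every generated op (padded via
       gen_opc_instr_start), so gen_pc_load below can restore the
       precise exception state from a host PC. */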
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for (i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)