/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
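/* Note: cpu_cc_op/cpu_cc_src/cpu_cc_dst implement the lazy condition
   code scheme: instead of computing EFLAGS after every instruction, the
   translator records the kind of the last flag-setting operation
   (cc_op) together with its operands (cc_src/cc_dst), and the flags are
   only materialized when an instruction actually reads them. */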
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
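/* These offsets select a sub-register inside the target_ulong slot of
   CPUState.regs[]: REG_B/REG_H address the low/high byte (e.g. AL/AH),
   REG_W the 16 bit word (AX), REG_L the low 32 bits (EAX) and REG_LH
   the upper half of a 64 bit register. The WORDS_BIGENDIAN variants
   compensate for host byte order so the same partial-word loads and
   stores work on both kinds of hosts. */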
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
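/* Note: the OT_BYTE test above mirrors the x86 register encoding:
   without a REX prefix, byte registers 4..7 address the high bytes
   AH/CH/DH/BH of regs[0..3]; with a REX prefix (x86_64_hregs) every
   encoding addresses a low byte (SPL/BPL/SIL/DIL and r8b..r15b). */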
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
}
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
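/* The GEN_REPZ/GEN_REPZ2 macros below expand to one helper per string
   instruction; each iteration executes a single string op, decrements
   ECX and jumps back to the current instruction, so interrupts and
   exceptions stay precise at every step. GEN_REPZ2 additionally tests
   the ZF condition needed by REPZ/REPNZ cmps and scas. */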
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL, gen_op_jb_subb, gen_op_jz_subb, gen_op_jbe_subb,
        gen_op_js_subb, NULL, gen_op_jl_subb, gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL, gen_op_jb_subw, gen_op_jz_subw, gen_op_jbe_subw,
        gen_op_js_subw, NULL, gen_op_jl_subw, gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL, gen_op_jb_subl, gen_op_jz_subl, gen_op_jbe_subl,
        gen_op_js_subl, NULL, gen_op_jl_subl, gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL, BUGGY_64(gen_op_jb_subq), gen_op_jz_subq, BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq, NULL, BUGGY_64(gen_op_jl_subq), BUGGY_64(gen_op_jle_subq),
    },
#endif
};

static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
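/* Both eflags helpers above dispatch through cc_table: the current
   cc_op, shifted by the size of a CCTable entry, indexes the table;
   the compute_c or compute_all function pointer is loaded and invoked
   through a TCG indirect call. TCG_CALL_PURE marks the call free of
   side effects so the optimizer may delete it when the result is
   unused. */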
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
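/* Note on the ADC/SBB cases above: the incoming carry (computed by
   gen_compute_eflags_c into cpu_tmp4) is folded into the result, and
   the same carry value, shifted left by 2, is added to CC_OP_ADDB/
   CC_OP_SUBB + ot to select the ADC/SBB group of the cc_op enum at
   runtime, since the flag result of adc/sbb also depends on the carry
   that went in. cc_op therefore becomes data-dependent (CC_OP_DYNAMIC
   from the translator's point of view). */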
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
/* XXX: add faster immediate case */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
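/* Flag computation trick used above: the operand shifted by count-1 is
   kept in cpu_T3, so the carry out of the final shift step can later
   be derived from its edge bit by the lazy SARB/SHLB cc_op handlers.
   The brcond skips the flag update entirely when the masked count is
   zero, because a zero-count shift leaves EFLAGS unchanged. */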
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {

        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
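/* gen_lea_modrm is the single effective-address decoder used for
   memory operands: it handles 16, 32 and 64 bit ModRM forms (including
   the SIB byte, RIP-relative addressing and all displacement sizes)
   and leaves the computed address in A0, adding a segment base when
   addseg or an explicit override requires it. */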
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {

        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    } else {
        override = R_DS;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}

static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
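/* Direct block chaining: when the target eip stays on the same page as
   the current TB (or the page of the instruction's last byte), goto_tb
   emits a patchable direct jump so the next TB can be linked in
   without returning to the main execution loop; cross-page jumps
   always go through gen_eob so page protection changes are honoured. */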
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op, l1, l2;
    GenOpFunc1 *func;
    target_ulong tmp;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

        /* some jumps are easy to compute */
        case CC_OP_ADDB: case CC_OP_ADDW: case CC_OP_ADDL: case CC_OP_ADDQ:
        case CC_OP_ADCB: case CC_OP_ADCW: case CC_OP_ADCL: case CC_OP_ADCQ:
        case CC_OP_SBBB: case CC_OP_SBBW: case CC_OP_SBBL: case CC_OP_SBBQ:
        case CC_OP_LOGICB: case CC_OP_LOGICW: case CC_OP_LOGICL: case CC_OP_LOGICQ:
        case CC_OP_INCB: case CC_OP_INCW: case CC_OP_INCL: case CC_OP_INCQ:
        case CC_OP_DECB: case CC_OP_DECW: case CC_OP_DECL: case CC_OP_DECQ:
        case CC_OP_SHLB: case CC_OP_SHLW: case CC_OP_SHLL: case CC_OP_SHLQ:
        case CC_OP_SARB: case CC_OP_SARW: case CC_OP_SARL: case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB: case CC_OP_ADDW: case CC_OP_ADDL: case CC_OP_ADDQ:
    case CC_OP_ADCB: case CC_OP_ADCW: case CC_OP_ADCL: case CC_OP_ADCQ:
    case CC_OP_SBBB: case CC_OP_SBBW: case CC_OP_SBBL: case CC_OP_SBBQ:
    case CC_OP_LOGICB: case CC_OP_LOGICW: case CC_OP_LOGICL: case CC_OP_LOGICQ:
    case CC_OP_INCB: case CC_OP_INCW: case CC_OP_INCL: case CC_OP_INCQ:
    case CC_OP_DECB: case CC_OP_DECW: case CC_OP_DECL: case CC_OP_DECQ:
    case CC_OP_SHLB: case CC_OP_SHLW: case CC_OP_SHLL: case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}

static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
            break;
    }
    return 0;
}

static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
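/* The push/pop helpers below must honour three mode bits at once:
   dflag (16 vs 32/64 bit operand size), ss32 (width of the ESP
   arithmetic) and addseg (whether the SS segment base must be added
   explicitly to form the linear address). */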
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
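/* sse_op_table1 below is indexed by the second opcode byte and by b1,
   the mandatory-prefix selector computed in gen_sse (0: none/MMX,
   1: 0x66, 2: 0xF3, 3: 0xF2). SSE_SPECIAL marks opcodes that are
   decoded by hand in gen_sse, and SSE_DUMMY marks ops like femms/emms
   that only need a helper call. */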
2629 static void *sse_op_table1
[256][4] = {
2630 /* 3DNow! extensions */
2631 [0x0e] = { SSE_DUMMY
}, /* femms */
2632 [0x0f] = { SSE_DUMMY
}, /* pf... */
2633 /* pure SSE operations */
2634 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2635 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2636 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2637 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2638 [0x14] = { helper_punpckldq_xmm
, helper_punpcklqdq_xmm
},
2639 [0x15] = { helper_punpckhdq_xmm
, helper_punpckhqdq_xmm
},
2640 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2641 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2643 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2644 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2645 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2646 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2647 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2648 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2649 [0x2e] = { helper_ucomiss
, helper_ucomisd
},
2650 [0x2f] = { helper_comiss
, helper_comisd
},
2651 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2652 [0x51] = SSE_FOP(sqrt
),
2653 [0x52] = { helper_rsqrtps
, NULL
, helper_rsqrtss
, NULL
},
2654 [0x53] = { helper_rcpps
, NULL
, helper_rcpss
, NULL
},
2655 [0x54] = { helper_pand_xmm
, helper_pand_xmm
}, /* andps, andpd */
2656 [0x55] = { helper_pandn_xmm
, helper_pandn_xmm
}, /* andnps, andnpd */
2657 [0x56] = { helper_por_xmm
, helper_por_xmm
}, /* orps, orpd */
2658 [0x57] = { helper_pxor_xmm
, helper_pxor_xmm
}, /* xorps, xorpd */
2659 [0x58] = SSE_FOP(add
),
2660 [0x59] = SSE_FOP(mul
),
2661 [0x5a] = { helper_cvtps2pd
, helper_cvtpd2ps
,
2662 helper_cvtss2sd
, helper_cvtsd2ss
},
2663 [0x5b] = { helper_cvtdq2ps
, helper_cvtps2dq
, helper_cvttps2dq
},
2664 [0x5c] = SSE_FOP(sub
),
2665 [0x5d] = SSE_FOP(min
),
2666 [0x5e] = SSE_FOP(div
),
2667 [0x5f] = SSE_FOP(max
),
2669 [0xc2] = SSE_FOP(cmpeq
),
2670 [0xc6] = { helper_shufps
, helper_shufpd
},
    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
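
/* Each row of sse_op_table1 has four columns selected by the mandatory
   prefix of the instruction: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2.
   This matches the b1 value computed at the start of gen_sse() below.
   SSE_SPECIAL entries are decoded by hand instead of a single helper
   call, and NULL columns are rejected as illegal opcodes. */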
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
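
/* sse_op_table2 is indexed as [width * 8 + shift op][b1]: for opcodes
   0x71/0x72/0x73 the expression ((b - 1) & 3) * 8 used in gen_sse()
   selects the w/d/q row group and the modrm reg field selects the
   operation (2 = srl, 4 = sra, 6 = sll; the byte shifts 3 = srldq and
   7 = slldq exist only in the 0x66 column). */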
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
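
/* sse_op_table3 is indexed by (dflag == 2) * 2 + ((b >> 8) - 2), with an
   extra offset of 4 or 8 for the truncating and non truncating
   float-to-integer conversions: bit 8 of b picks ss vs sd and
   dflag == 2 selects the 64 bit (sq) variants, which only exist on
   x86_64, hence the X86_64_ONLY wrappers. */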
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq), SSE_FOP(cmplt), SSE_FOP(cmple), SSE_FOP(cmpunord),
    SSE_FOP(cmpneq), SSE_FOP(cmpnlt), SSE_FOP(cmpnle), SSE_FOP(cmpord),
};
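
/* sse_op_table4 backs the cmpps/cmppd/cmpss/cmpsd family: the imm8
   predicate (0..7) selects the row and the prefix (b1) the column, so a
   single table lookup replaces an eight way switch on the predicate. */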
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx, /* pavgusb */
};
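
/* 3DNow! encodes the actual operation in an imm8 suffix byte rather than
   in the opcode itself, so sse_op_table5 is indexed by that byte; entries
   left NULL are rejected as illegal opcodes in gen_sse(). */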
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (sse_op2 == NULL)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }
    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
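            /* Note: the non-temporal hint of the movnt* instructions only
               affects cache behaviour, so translating them as the ordinary
               MMX/XMM stores above is sufficient for emulation. */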
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (sse_op2 == NULL)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
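            /* For the immediate shift forms the count is staged in the
               xmm_t0/mmx_t0 scratch slot and passed through op1_offset, so
               the same two pointer helpers serve both the register and the
               immediate variants. */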
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
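            /* Both conversion directions go through sse_op_table3; for a
               32 bit destination the i32 helper result is widened with
               tcg_gen_extu_i32_tl, while the 64 bit variants write
               cpu_T[0] directly. */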
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (sse_op2 == NULL)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* just to keep the EFLAGS optimization correct */
            gen_op_fcomi_dummy();
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;
    dflag = s->code32;
    s->override = -1;
    rex_w = -1;
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2;
    } else
#endif
    {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;

    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_lock);
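
    /* Prefix bytes have been accumulated by looping back to next_byte
       until a non prefix byte was found; only then are the effective
       operand size (dflag) and address size (aflag) frozen and copied
       into the DisasContext for the rest of the decode. */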
    /* now check op code */
 reswitch:
    switch(b) {
    case 0x0f:
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        goto reswitch;

        /**************************/
        /* arith & logic */
    case 0x00 ... 0x05:
    case 0x08 ... 0x0d:
    case 0x10 ... 0x15:
    case 0x18 ... 0x1d:
    case 0x20 ... 0x25:
    case 0x28 ... 0x2d:
    case 0x30 ... 0x35:
    case 0x38 ... 0x3d:
        {
            int op, f, val;
            op = (b >> 3) & 7;
            f = (b >> 1) & 3;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            switch(f) {
            case 0: /* OP Ev, Gv */
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                mod = (modrm >> 6) & 3;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    opreg = OR_TMP0;
                } else if (op == OP_XORL && rm == reg) {
                xor_zero:
                    /* xor reg, reg optimisation */
                    gen_op_movl_T0_0();
                    s->cc_op = CC_OP_LOGICB + ot;
                    gen_op_mov_reg_T0(ot, reg);
                    gen_op_update1_cc();
                    break;
                } else {
                    opreg = rm;
                }
                gen_op_mov_TN_reg(ot, 1, reg);
                gen_op(s, op, ot, opreg);
                break;
            case 1: /* OP Gv, Ev */
                modrm = ldub_code(s->pc++);
                mod = (modrm >> 6) & 3;
                reg = ((modrm >> 3) & 7) | rex_r;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_ld_T1_A0(ot + s->mem_index);
                } else if (op == OP_XORL && rm == reg) {
                    goto xor_zero;
                } else {
                    gen_op_mov_TN_reg(ot, 1, rm);
                }
                gen_op(s, op, ot, reg);
                break;
            case 2: /* OP A, Iv */
                val = insn_get(s, ot);
                gen_op_movl_T1_im(val);
                gen_op(s, op, ot, OR_EAX);
                break;
            }
        }
        break;

    case 0x80: /* GRP1 */
    case 0x81:
    case 0x82:
    case 0x83:
        {
            int val;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (b == 0x83)
                    s->rip_offset = 1;
                else
                    s->rip_offset = insn_const_size(ot);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = rm;
            }

            switch(b) {
            default:
            case 0x80:
            case 0x81:
            case 0x82:
                val = insn_get(s, ot);
                break;
            case 0x83:
                val = (int8_t)insn_get(s, OT_BYTE);
                break;
            }
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, opreg);
        }
        break;

        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
        break;
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
        break;
    case 0xf6: /* GRP3 */
    case 0xf7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            break;
        case 3: /* neg */
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
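            /* For the MUL family the lazy flag scheme keeps the low half
               of the result in cc_dst and the high half (or a sign
               difference term for IMUL below) in cc_src; CC_OP_MULB + ot
               then lets the flag evaluation code derive CF/OF from
               whether the upper half is significant. */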
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        case 7: /* idiv */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for call calls, the operand is 16 or 32 bit, even
                   in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_lcall:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_real,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - s->cs_base));
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_3(helper_ljmp_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;

    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
        } else {
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
        }
        break;
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
        }
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg(ot, 1, reg);
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
        } else
#endif
        if (ot == OT_LONG) {
#ifdef TARGET_X86_64
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
#else
            {
                TCGv t0, t1;
                t0 = tcg_temp_new(TCG_TYPE_I64);
                t1 = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                tcg_gen_mul_i64(t0, t0, t1);
                tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                tcg_gen_shri_i64(t0, t0, 32);
                tcg_gen_trunc_i64_i32(cpu_T[1], t0);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
            }
#endif
        } else {
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
            /* XXX: use 32 bit mul which could be faster */
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
        }
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        {
            int label1;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            gen_op_mov_TN_reg(ot, 1, reg);
            if (mod == 3) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_TN_reg(ot, 0, rm);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
                rm = 0; /* avoid warning */
            }
            label1 = gen_new_label();
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
            gen_extu(ot, cpu_T3);
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
            gen_op_mov_reg_T0(ot, R_EAX);
            gen_set_label(label1);
            if (mod == 3) {
                gen_op_mov_reg_T1(ot, rm);
            } else {
                gen_op_st_T1_A0(ot + s->mem_index);
            }
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
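        /* cmpxchg is translated inline with a compare and branch rather
           than a helper: when the values differ the branch is not taken,
           the loaded value is copied into EAX and T1 keeps the old
           destination value, while the flags are set up as for a SUB of
           the two operands. */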
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
        gen_jmp_im(pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
        s->cc_op = CC_OP_EFLAGS;
        break;

        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;

        /**************************/
        /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0(ot + s->mem_index);
        else
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(ot, 0, rm);
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_BYTE | 8:
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_WORD:
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                    break;
                default:
                case OT_WORD | 8:
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                    break;
                }
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0(ot + s->mem_index);
                } else {
                    gen_op_ldu_T0_A0(ot + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        }
        break;

    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;

    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
            else
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;

    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_lock);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_unlock);
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /************************/
        /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;

    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = rm;
        }
        gen_op_mov_TN_reg(ot, 1, reg);

        if (shift) {
            val = ldub_code(s->pc++);
            tcg_gen_movi_tl(cpu_T3, val);
        } else {
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
        }
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
        break;
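        /* Both shld and shrd funnel into one generator: the count comes
           either from an imm8 or from ECX, but in each case it is placed
           in cpu_T3 first so gen_shiftd_rm_T1_T3() only has to handle a
           single count source. */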
        /************************/
        /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    }

                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        tcg_gen_helper_0_0(helper_fpop);
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    }
                    break;
                case 1:
                    /* XXX: the corresponding CPUID bit must be tested ! */
                    switch(op >> 4) {
                    case 1:
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    switch(op >> 4) {
                    case 0:
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 1:
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    if ((op & 7) == 3)
                        tcg_gen_helper_0_0(helper_fpop);
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fldenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0d: /* fldcw mem */
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
                break;
            case 0x0e: /* fnstenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fstenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0f: /* fnstcw mem */
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x1d: /* fldt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
                break;
            case 0x1f: /* fstpt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* frstor mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_frstor,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2e: /* fnsave mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fsave,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2f: /* fnstsw mem */
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x3c: /* fbld */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
                break;
            case 0x3e: /* fbstp */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3d: /* fildll */
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
                break;
            case 0x3f: /* fistpll */
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            default:
                goto illegal_op;
            }
5063 /* register float ops */
5067 case 0x08: /* fld sti */
5068 tcg_gen_helper_0_0(helper_fpush
);
5069 tcg_gen_helper_0_1(helper_fmov_ST0_STN
, tcg_const_i32((opreg
+ 1) & 7));
5071 case 0x09: /* fxchg sti */
5072 case 0x29: /* fxchg4 sti, undocumented op */
5073 case 0x39: /* fxchg7 sti, undocumented op */
5074 tcg_gen_helper_0_1(helper_fxchg_ST0_STN
, tcg_const_i32(opreg
));
5076 case 0x0a: /* grp d9/2 */
5079 /* check exceptions (FreeBSD FPU probe) */
5080 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5081 gen_op_set_cc_op(s
->cc_op
);
5082 gen_jmp_im(pc_start
- s
->cs_base
);
5083 tcg_gen_helper_0_0(helper_fwait
);
5089 case 0x0c: /* grp d9/4 */
5092 tcg_gen_helper_0_0(helper_fchs_ST0
);
5095 tcg_gen_helper_0_0(helper_fabs_ST0
);
5098 tcg_gen_helper_0_0(helper_fldz_FT0
);
5099 tcg_gen_helper_0_0(helper_fcom_ST0_FT0
);
5102 tcg_gen_helper_0_0(helper_fxam_ST0
);
5108 case 0x0d: /* grp d9/5 */
5112 tcg_gen_helper_0_0(helper_fpush
);
5113 tcg_gen_helper_0_0(helper_fld1_ST0
);
5116 tcg_gen_helper_0_0(helper_fpush
);
5117 tcg_gen_helper_0_0(helper_fldl2t_ST0
);
5120 tcg_gen_helper_0_0(helper_fpush
);
5121 tcg_gen_helper_0_0(helper_fldl2e_ST0
);
5124 tcg_gen_helper_0_0(helper_fpush
);
5125 tcg_gen_helper_0_0(helper_fldpi_ST0
);
5128 tcg_gen_helper_0_0(helper_fpush
);
5129 tcg_gen_helper_0_0(helper_fldlg2_ST0
);
5132 tcg_gen_helper_0_0(helper_fpush
);
5133 tcg_gen_helper_0_0(helper_fldln2_ST0
);
5136 tcg_gen_helper_0_0(helper_fpush
);
5137 tcg_gen_helper_0_0(helper_fldz_ST0
);
5144 case 0x0e: /* grp d9/6 */
5147 tcg_gen_helper_0_0(helper_f2xm1
);
5150 tcg_gen_helper_0_0(helper_fyl2x
);
5153 tcg_gen_helper_0_0(helper_fptan
);
5155 case 3: /* fpatan */
5156 tcg_gen_helper_0_0(helper_fpatan
);
5158 case 4: /* fxtract */
5159 tcg_gen_helper_0_0(helper_fxtract
);
5161 case 5: /* fprem1 */
5162 tcg_gen_helper_0_0(helper_fprem1
);
5164 case 6: /* fdecstp */
5165 tcg_gen_helper_0_0(helper_fdecstp
);
5168 case 7: /* fincstp */
5169 tcg_gen_helper_0_0(helper_fincstp
);
5173 case 0x0f: /* grp d9/7 */
5176 tcg_gen_helper_0_0(helper_fprem
);
5178 case 1: /* fyl2xp1 */
5179 tcg_gen_helper_0_0(helper_fyl2xp1
);
5182 tcg_gen_helper_0_0(helper_fsqrt
);
5184 case 3: /* fsincos */
5185 tcg_gen_helper_0_0(helper_fsincos
);
5187 case 5: /* fscale */
5188 tcg_gen_helper_0_0(helper_fscale
);
5190 case 4: /* frndint */
5191 tcg_gen_helper_0_0(helper_frndint
);
5194 tcg_gen_helper_0_0(helper_fsin
);
5198 tcg_gen_helper_0_0(helper_fcos
);
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
                        if (op >= 0x30)
                            tcg_gen_helper_0_0(helper_fpop);
                    } else {
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    tcg_gen_helper_0_0(helper_fclex);
                    break;
                case 3: /* fninit */
                    tcg_gen_helper_0_0(helper_fninit);
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                break;
            case 0x2a: /* fst sti */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* fucom st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                break;
            case 0x2d: /* fucomp st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                gen_op_fcomi_dummy();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1, l1;
                    const static uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
                    gen_setcc(s, op1);
                    l1 = gen_new_label();
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
                    gen_set_label(l1);
                }
                break;
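            /* fcmov is expanded without a conditional move op: gen_setcc()
               materializes the condition in T0, and the brcond above simply
               skips the ST0 load when the condition is false. */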
            default:
                goto illegal_op;
            }
        }
        break;
        /************************/
        /* string ops */

    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
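    /* The gen_repz_* expanders receive both the current and the next EIP
       so the generated loop can restart or fall through with a precise
       program counter; for scas/cmps the non-rep path records
       CC_OP_SUBB + ot, i.e. flags pending from a subtract of width ot. */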
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;
        /************************/
        /* port I/O */

    case 0xe4:
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xe6:
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        break;
    case 0xec:
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        break;
    case 0xee:
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        break;
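    /* gen_check_io() validates the access against IOPL (or the TSS I/O
       permission bitmap) and the SVM I/O intercept bitmap before any
       in/out helper runs; the port number is masked to 16 bits first. */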
        /************************/
        /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
            break;
        if (!s->pe) {
            /* real mode */
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
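    /* Near returns are expanded inline (pop + jmp T0), but far returns
       and protected-mode iret depend on CPL, selectors and the TSS, so
       they are delegated to helpers; gen_eob() then closes the TB since
       the next EIP is only known at run time. */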
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;

    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_setcc(s, b);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(ot + s->mem_index);
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 1, rm);
        }
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
        break;
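    /* cmov also reuses gen_setcc(): the condition value lands in T0 and
       the micro-op table, indexed by operand size and destination
       register, commits T1 to the register only when T0 is non-zero. */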
        /************************/
        /* flags */
    case 0x9c: /* pushf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_movl_T0_eflags();
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            if (s->cpl == 0) {
                if (s->dflag) {
                    gen_op_movl_eflags_T0_cpl0();
                } else {
                    gen_op_movw_eflags_T0_cpl0();
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0_io();
                    } else {
                        gen_op_movw_eflags_T0_io();
                    }
                } else {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0();
                    } else {
                        gen_op_movw_eflags_T0();
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movb_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movl_T0_eflags();
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_cmc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_clc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_stc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
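    /* DF is stored as +1/-1 rather than as a flag bit so the string op
       expanders can add (df << shift) to ESI/EDI directly instead of
       testing the direction flag on every iteration. */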
        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        goto bt_op;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_exts(ot, cpu_T[1]);
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
    bt_op:
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
        switch(op) {
        case 0:
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            break;
        case 1:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        case 2:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        default:
        case 3:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        }
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
        }
        break;
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        {
            int label1;
            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            gen_extu(ot, cpu_T[0]);
            label1 = gen_new_label();
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
            if (b & 1) {
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
            } else {
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
            }
            gen_op_mov_reg_T0(ot, reg);
            tcg_gen_movi_tl(cpu_cc_dst, 1);
            gen_set_label(label1);
            tcg_gen_discard_tl(cpu_cc_src);
            s->cc_op = CC_OP_LOGICB + ot;
        }
        break;
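    /* For the register forms of bt/bts/btr/btc the bit offset may reach
       outside the addressed operand: the sign-extended offset is divided
       by the operand width in bits (sari by 3 + ot) and scaled back to
       bytes (shli by ot) to displace A0 before the load. The tested bit
       ends up in the LSB of cc_src, which CC_OP_SARB + ot exposes as CF. */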
        /************************/
        /* bcd */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_daa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_das);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aaa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aas);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
        break;
        /************************/
        /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
        }
        break;
    case 0xcc: /* int3 */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_into(s->pc - pc_start);
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
            break;
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                tcg_gen_helper_0_0(helper_sti);
                /* interrupts are enabled only on the first insn after sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
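    /* The HF_INHIBIT_IRQ_MASK handling implements the one-instruction
       interrupt shadow after sti: the inhibit flag is set only if it was
       not already pending, and closing the TB immediately afterwards lets
       the execution loop see pending interrupts once the shadow expires. */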
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        if (ot == OT_WORD)
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
        else
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else {
            TCGv tmp0;
            gen_op_mov_TN_reg(OT_LONG, 0, reg);

            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        {
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#endif
        break;
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_salc();
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2, l3;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            l3 = gen_new_label();
            b &= 3;
            switch(b) {
            case 0: /* loopnz */
            case 1: /* loopz */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jz_ecx(s->aflag, l3);
                gen_compute_eflags(cpu_tmp0);
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                if (b == 0) {
                    tcg_gen_brcond_tl(TCG_COND_EQ,
                                      cpu_tmp0, tcg_const_tl(0), l1);
                } else {
                    tcg_gen_brcond_tl(TCG_COND_NE,
                                      cpu_tmp0, tcg_const_tl(0), l1);
                }
                break;
            case 2: /* loop */
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jnz_ecx(s->aflag, l1);
                break;
            default:
            case 3: /* jecxz */
                gen_op_jz_ecx(s->aflag, l1);
                break;
            }

            gen_set_label(l3);
            gen_jmp_im(next_eip);
            gen_op_jmp_label(l2);

            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
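    /* Three labels structure the loop/jecxz expansion: l1 is the taken
       path (EIP = tval), l3 is the not-taken path used by loopz/loopnz
       when ECX hits zero (EIP = next_eip), and both meet at l2 where the
       TB is closed. */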
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            int retval = 0;
            if (b & 2) {
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
                tcg_gen_helper_0_0(helper_rdmsr);
            } else {
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
                tcg_gen_helper_0_0(helper_wrmsr);
            }
            if (retval)
                gen_eob(s);
        }
        break;
    case 0x131: /* rdtsc */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
            break;
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdtsc);
        break;
    case 0x133: /* rdpmc */
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdpmc);
        break;
    case 0x134: /* sysenter */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysenter);
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysexit);
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
            break;
        tcg_gen_helper_0_0(helper_cpuid);
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
                break;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            tcg_gen_helper_0_0(helper_hlt);
            s->is_jmp = 3;
        }
        break;
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                gen_op_verr();
            else
                gen_op_verw();
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
                break;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
        case 1:
            if (mod == 3) {
                switch (rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
                        break;
                    gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EAX);
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EAX);
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
                        break;
                    gen_jmp_im(s->pc - s->cs_base);
                    tcg_gen_helper_0_0(helper_mwait);
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
                    break;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                switch(rm) {
                case 0: /* VMRUN */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
                        break;
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(s->pc - s->cs_base);
                    tcg_gen_helper_0_0(helper_vmrun);
                    s->cc_op = CC_OP_EFLAGS;
                    gen_eob(s);
                    break;
                case 1: /* VMMCALL */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
                        break;
                    /* FIXME: cause #UD if hflags & SVM */
                    tcg_gen_helper_0_0(helper_vmmcall);
                    break;
                case 2: /* VMLOAD */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
                        break;
                    tcg_gen_helper_0_0(helper_vmload);
                    break;
                case 3: /* VMSAVE */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
                        break;
                    tcg_gen_helper_0_0(helper_vmsave);
                    break;
                case 4: /* STGI */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
                        break;
                    tcg_gen_helper_0_0(helper_stgi);
                    break;
                case 5: /* CLGI */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
                        break;
                    tcg_gen_helper_0_0(helper_clgi);
                    break;
                case 6: /* SKINIT */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
                        break;
                    tcg_gen_helper_0_0(helper_skinit);
                    break;
                case 7: /* INVLPGA */
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
                        break;
                    tcg_gen_helper_0_0(helper_invlpga);
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start,
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
                    break;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
                } else {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                        break;
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
                break;
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_TN_reg(ot, 0, rm);
            }
            gen_op_mov_TN_reg(ot, 1, reg);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS;
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_arpl_update();
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetchnt0 */
        case 2: /* prefetchnt0 */
        case 3: /* prefetchnt0 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (b & 2) {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                    gen_op_mov_TN_reg(ot, 0, rm);
                    tcg_gen_helper_0_2(helper_movl_crN_T0,
                                       tcg_const_i32(reg), cpu_T[0]);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                    if (reg == 8)
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
                    else
#endif
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
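    /* A LOCK prefix made the prefix decoder emit helper_lock() before the
       locked memory op; both the normal exit and the illegal_op path
       below must release that global lock with helper_unlock(). */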
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}

void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}