/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
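/* Note: the TCGv handles above are allocated once at translator init
   time; the "global" set is backed by fields of the CPU state and is
   live across a whole translation block, while the cpu_tmp* values are
   scratch registers reused inside individual micro-op sequences. */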
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
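/* Note: the REG_*_OFFSET values locate the 8/16/32 bit sub-registers
   (AL, AH, AX, EAX, ...) inside the target_ulong slot of the CPU
   register array: on a big endian host the least significant bytes sit
   at the end of the slot, hence the sizeof(target_ulong) arithmetic. */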
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
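/* Note: in 64 bit mode a 32 bit register write must clear the upper
   half of the destination, which is why the OT_LONG case above stores
   an explicit zero at REG_LH_OFFSET instead of doing a single 64 bit
   store. */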
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
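/* Note: 'size' selects the address size (0 = 16, 1 = 32, 2 = 64 bit):
   the 16 bit case only rewrites the low word of the register, and the
   32 bit case masks the sum so the register wraps like a 32 bit
   quantity. */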
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
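/* Note: the 'idx' argument packs two values: the low 2 bits are the
   operand size (OT_BYTE..OT_QUAD) and the upper bits come from
   s->mem_index (kept a multiple of 4), so (idx >> 2) - 1 recovers the
   memory access index for the qemu_ld/st ops. */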
/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}
static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
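/* Note: condition codes are handled lazily: s->cc_op records at
   translation time which kind of operation last set the flags, and the
   value is only flushed to the cpu_cc_op global (becoming
   CC_OP_DYNAMIC) when the translator can no longer track it
   statically. */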
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
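/* Note: cc_table[] holds one { compute_all, compute_c } pair of
   function pointers per CC_OP value; the sequences above compute the
   address of the right entry at runtime (entry size 8 on 32 bit hosts,
   16 on 64 bit hosts) and call the selected flag helper indirectly. */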
/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
static inline void gen_ins(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

static inline void gen_outs(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
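/* Note: a REP prefixed string instruction is translated as a single
   iteration followed by a jump back to the same instruction (or on to
   the next one once ECX is zero), so pending interrupts and single step
   exceptions are checked at the normal instruction boundary on every
   iteration. */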
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
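/* Note: for ADC/SBB the final cc_op depends on the run time carry, so
   gen_op builds it dynamically: CC_OP_ADDB/CC_OP_SUBB + ot plus 4 times
   the carry, which selects the ADC/SBB row of the flag helper tables
   when the carry was set. */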
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
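/* Note: cpu_T3 receives the value shifted by count - 1; its top (or
   bottom) bit is the CF left behind by the shift, which the
   CC_OP_SHLB/SARB flag helpers later extract from cc_src, so both the
   result and this intermediate must survive the zero-count branch. */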
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}

static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
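/* Note: gen_lea_modrm is the single decoder for ModRM effective
   addresses: it consumes any SIB byte and displacement from the code
   stream, leaves the linear address in cpu_A0 and only adds a segment
   base when addseg or an explicit override requires it. */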
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;
        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }
        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}

/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}

static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}

static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
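/* Note: direct block chaining patches the jump target into the
   generated code, so it is only allowed when the destination lies in
   the same page as the current TB; a cross page jump must return to the
   main loop so the target page protections are rechecked. */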
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}

static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worthwhile */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}

static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}

static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State,segs[seg_reg].base));
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}

static inline void
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));
}

static inline void
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}

static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}

static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
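/* Note: each sse_op_table1 row holds up to four helpers, indexed by the
   mandatory prefix of the opcode: none, 0x66, 0xF3, 0xF2. SSE_SPECIAL
   marks opcodes that are decoded by hand and SSE_DUMMY marks helpers
   that take no operands (femms/emms). */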
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
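
/* sse_op_table2 serves the immediate-count shift group (opcodes
   0x71-0x73): the row is ((b - 1) & 3) * 8 plus the modrm reg field,
   and the column selects the MMX (0) or SSE (1) helper. */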
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
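
/* sse_op_table3 holds the scalar int<->float converters in three groups
   of four (cvtsi2*, then the truncating cvtt*2si/sq, then cvt*2si/sq);
   gen_sse indexes it with (s->dflag == 2) * 2 + ((b >> 8) - 2), adding
   4 + (b & 1) * 4 to reach the integer-result groups. */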
#define SSE_CMP(x) { \
    helper_ ## x ## ps, helper_ ## x ## pd, \
    helper_ ## x ## ss, helper_ ## x ## sd}
static void *sse_op_table4[8][4] = {
    SSE_CMP(cmpeq),
    SSE_CMP(cmplt),
    SSE_CMP(cmple),
    SSE_CMP(cmpunord),
    SSE_CMP(cmpneq),
    SSE_CMP(cmpnlt),
    SSE_CMP(cmpnle),
    SSE_CMP(cmpord),
};
#undef SSE_CMP
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
struct sse_op_helper_s {
    void *op[2]; uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
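
/* Each entry in the SSSE3/SSE4 tables below carries an ext_mask that is
   checked against s->cpuid_ext_features, so encodings the virtual CPU
   does not advertise fall through to illegal_op. */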
static struct sse_op_helper_s sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntdqa */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
static struct sse_op_helper_s sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
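
/* Decode and translate one MMX/SSE/SSE2/SSE3/SSSE3/SSE4/3DNow!
   instruction: b is the 0x0f-escaped opcode byte and any mandatory
   prefix has already been recorded in s->prefix. */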
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
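        /* Worked example for the scalar conversions above: 0xF2 0x0F 0x2D
           (cvtsd2si) arrives with b = 0x32d, so ((b >> 8) - 2) = 1 and
           (b & 1) * 4 = 4, selecting helper_cvtsd2si from sse_op_table3. */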
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x038:
        case 0x138:
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            if (s->prefix & PREFIX_REPNZ)
                goto crc32;

            sse_op2 = sse_op_table6[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    switch (b) {
                    case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
                    case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
                    case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
                        gen_ldq_env_A0(s->mem_index, op2_offset +
                                        offsetof(XMMReg, XMM_Q(0)));
                        break;
                    case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
                    case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_L(0)));
                        break;
                    case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
                        tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_W(0)));
                        break;
                    case 0x2a: /* movntdqa */
                        gen_ldo_env_A0(s->mem_index, op1_offset);
                        return;
                    default:
                        gen_ldo_env_A0(s->mem_index, op2_offset);
                    }
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            if (sse_op2 == SSE_SPECIAL)
                goto illegal_op;

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);

            if (b == 0x17)
                s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x338: /* crc32 */
        crc32:
            b = modrm;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;

            if (b != 0xf0 && b != 0xf1)
                goto illegal_op;
            if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
                goto illegal_op;

            if (b == 0xf0)
                ot = OT_BYTE;
            else if (b == 0xf1 && s->dflag != 2)
                if (s->prefix & PREFIX_DATA)
                    ot = OT_WORD;
                else
                    ot = OT_LONG;
            else
                ot = OT_QUAD;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
                               cpu_T[0], tcg_const_i32(8 << ot));

            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x03a:
        case 0x13a:
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
                goto illegal_op;

            if (sse_op2 == SSE_SPECIAL) {
                ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3)
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                reg = ((modrm >> 3) & 7) | rex_r;
                val = ldub_code(s->pc++);
                switch (b) {
                case 0x14: /* pextrb */
                    tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
                                         (s->mem_index >> 2) - 1);
                    break;
                case 0x15: /* pextrw */
                    tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_W(val & 7)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    break;
                case 0x16:
                    if (ot == OT_LONG) { /* pextrd */
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
                        else
                            tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                    } else { /* pextrq */
                        tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
                        else
                            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                    }
                    break;
                case 0x17: /* extractps */
                    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_L(val & 3)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    break;
                case 0x20: /* pinsrb */
                    if (mod == 3)
                        gen_op_mov_TN_reg(OT_LONG, 0, rm);
                    else
                        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    break;
                case 0x21: /* insertps */
                    if (mod == 3)
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                       offsetof(CPUX86State,xmm_regs[rm]
                                                .XMM_L((val >> 6) & 3)));
                    else
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                   offsetof(CPUX86State,xmm_regs[reg]
                                            .XMM_L((val >> 4) & 3)));
                    if ((val >> 0) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(0)));
                    if ((val >> 1) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(1)));
                    if ((val >> 2) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(2)));
                    if ((val >> 3) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(3)));
                    break;
                case 0x22:
                    if (ot == OT_LONG) { /* pinsrd */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
                        else
                            tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                               (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                    } else { /* pinsrq */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
                        else
                            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                        tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                    }
                    break;
                }
                return;
            }
            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
                s->cc_op = CC_OP_EFLAGS;

                if (s->dflag == 2)
                    /* The helper must use entire 64-bit gp registers */
                    val |= 1 << 8;
            }

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    if (unlikely(loglevel & CPU_LOG_TB_OP))
        tcg_gen_debug_insn_start(pc_start);
    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;
    dflag = s->code32;
    s->override = -1;
    rex_w = -1;
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2;
    } else
#endif
    {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;

    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_lock);
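
    /* helper_lock takes a global lock; the matching helper_unlock is
       emitted once the instruction body has been translated, so locked
       read-modify-write sequences execute atomically. */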
    /* now check op code */
 reswitch:
    switch(b) {
    case 0x0f:
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        goto reswitch;

        /**************************/
        /* arith & logic */
    case 0x00 ... 0x05:
    case 0x08 ... 0x0d:
    case 0x10 ... 0x15:
    case 0x18 ... 0x1d:
    case 0x20 ... 0x25:
    case 0x28 ... 0x2d:
    case 0x30 ... 0x35:
    case 0x38 ... 0x3d:
        {
            int op, f, val;
            op = (b >> 3) & 7;
            f = (b >> 1) & 3;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            switch(f) {
            case 0: /* OP Ev, Gv */
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                mod = (modrm >> 6) & 3;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    opreg = OR_TMP0;
                } else if (op == OP_XORL && rm == reg) {
                xor_zero:
                    /* xor reg, reg optimisation */
                    gen_op_movl_T0_0();
                    s->cc_op = CC_OP_LOGICB + ot;
                    gen_op_mov_reg_T0(ot, reg);
                    gen_op_update1_cc();
                    break;
                } else {
                    opreg = rm;
                }
                gen_op_mov_TN_reg(ot, 1, reg);
                gen_op(s, op, ot, opreg);
                break;
            case 1: /* OP Gv, Ev */
                modrm = ldub_code(s->pc++);
                mod = (modrm >> 6) & 3;
                reg = ((modrm >> 3) & 7) | rex_r;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_ld_T1_A0(ot + s->mem_index);
                } else if (op == OP_XORL && rm == reg) {
                    goto xor_zero;
                } else {
                    gen_op_mov_TN_reg(ot, 1, rm);
                }
                gen_op(s, op, ot, reg);
                break;
            case 2: /* OP A, Iv */
                val = insn_get(s, ot);
                gen_op_movl_T1_im(val);
                gen_op(s, op, ot, OR_EAX);
                break;
            }
        }
        break;
    case 0x80: /* GRP1 */
    case 0x81:
    case 0x82:
    case 0x83:
        {
            int val;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (b == 0x83)
                    s->rip_offset = 1;
                else
                    s->rip_offset = insn_const_size(ot);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = rm;
            }

            switch(b) {
            default:
            case 0x80:
            case 0x81:
            case 0x82:
                val = insn_get(s, ot);
                break;
            case 0x83:
                val = (int8_t)insn_get(s, OT_BYTE);
                break;
            }
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, opreg);
        }
        break;
        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
        break;
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
        break;
    case 0xf6: /* GRP3 */
    case 0xf7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            break;
        case 3: /* neg */
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
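        /* For the mul cases above, cpu_cc_dst receives the low half of the
           product and cpu_cc_src the high half (0xff00 mask in the byte
           case); the CC_OP_MUL* modes derive CF/OF from cpu_cc_src != 0. */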
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
#else
                {
                    TCGv t0, t1;
                    t0 = tcg_temp_new(TCG_TYPE_I64);
                    t1 = tcg_temp_new(TCG_TYPE_I64);
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        case 7: /* idiv */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for call calls, the operand is 16 or 32 bit, even
                   in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_lcall:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_real,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - s->cs_base));
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_3(helper_ljmp_protected,
                                   cpu_tmp2_i32,
                                   cpu_T[1],
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                gen_op_movl_seg_T0_vm(R_CS);
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;
    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
        } else {
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
        }
        break;
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
        }
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg(ot, 1, reg);
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
        } else
#endif
        if (ot == OT_LONG) {
#ifdef TARGET_X86_64
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
#else
            {
                TCGv t0, t1;
                t0 = tcg_temp_new(TCG_TYPE_I64);
                t1 = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                tcg_gen_mul_i64(t0, t0, t1);
                tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                tcg_gen_shri_i64(t0, t0, 32);
                tcg_gen_trunc_i64_i32(cpu_T[1], t0);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
            }
#endif
        } else {
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
            /* XXX: use 32 bit mul which could be faster */
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
        }
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        {
            int label1, label2;
            TCGv t0, t1, t2, a0;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
            a0 = tcg_temp_local_new(TCG_TYPE_TL);
            gen_op_mov_v_reg(ot, t1, reg);
            if (mod == 3) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_mov_tl(a0, cpu_A0);
                gen_op_ld_v(ot + s->mem_index, t0, a0);
                rm = 0; /* avoid warning */
            }
            label1 = gen_new_label();
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
            tcg_gen_sub_tl(t2, t2, t0);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
            if (mod == 3) {
                label2 = gen_new_label();
                gen_op_mov_reg_v(ot, R_EAX, t0);
                tcg_gen_br(label2);
                gen_set_label(label1);
                gen_op_mov_reg_v(ot, rm, t1);
                gen_set_label(label2);
            } else {
                tcg_gen_mov_tl(t1, t0);
                gen_op_mov_reg_v(ot, R_EAX, t0);
                gen_set_label(label1);
                /* always store */
                gen_op_st_v(ot + s->mem_index, t1, a0);
            }
            tcg_gen_mov_tl(cpu_cc_src, t0);
            tcg_gen_mov_tl(cpu_cc_dst, t2);
            s->cc_op = CC_OP_SUBB + ot;
            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
            tcg_temp_free(a0);
        }
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
#ifdef TARGET_X86_64
        if (dflag == 2) {
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
        } else
#endif
        {
            if (!(s->cpuid_features & CPUID_CX8))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
        }
        s->cc_op = CC_OP_EFLAGS;
        break;
        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /**************************/
        /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
4964 ot
= dflag
+ OT_WORD
;
4965 modrm
= ldub_code(s
->pc
++);
4966 mod
= (modrm
>> 6) & 3;
4968 s
->rip_offset
= insn_const_size(ot
);
4969 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4971 val
= insn_get(s
, ot
);
4972 gen_op_movl_T0_im(val
);
4974 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4976 gen_op_mov_reg_T0(ot
, (modrm
& 7) | REX_B(s
));
4979 case 0x8b: /* mov Ev, Gv */
4983 ot
= OT_WORD
+ dflag
;
4984 modrm
= ldub_code(s
->pc
++);
4985 reg
= ((modrm
>> 3) & 7) | rex_r
;
4987 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4988 gen_op_mov_reg_T0(ot
, reg
);
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(ot, 0, rm);
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_BYTE | 8:
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_WORD:
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                    break;
                default:
                case OT_WORD | 8:
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                    break;
                }
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0(ot + s->mem_index);
                } else {
                    gen_op_ldu_T0_A0(ot + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        }
        break;
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;
    case 0xa0: /* mov EAX, Ov */
    case 0xa2: /* mov Ov, EAX */
        {
            target_ulong offset_addr;

            ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag)
                    offset_addr = insn_get(s, OT_LONG);
                else
                    offset_addr = insn_get(s, OT_WORD);
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
            else
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
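        /* xlat computes AL = [seg:(E/R)BX + unsigned AL]: AL is masked to
           8 bits, added to the base register in A0, truncated to the
           address size, and the DS base (or an override) is applied by
           gen_add_A0_ds_seg() before the byte load. */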
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x87: /* xchg Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_lock);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_unlock);
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
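        /* For the memory form of xchg the bus lock is architecturally
           implicit, so helper_lock/helper_unlock are emitted here even
           without a LOCK prefix; the PREFIX_LOCK test avoids taking the
           lock twice when the prefix handling has already done so. */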
    case 0xc4: /* les Gv */
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /************************/
        /* shifts */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = (modrm & 7) | REX_B(s);
        }
        /* shift by CL */
        gen_shift(s, op, ot, opreg, OR_ECX);
        /* or shift by an immediate count */
        shift = ldub_code(s->pc++);
        gen_shifti(s, op, ot, opreg, shift);
    case 0x1a4: /* shld imm */
    case 0x1a5: /* shld cl */
    case 0x1ac: /* shrd imm */
    case 0x1ad: /* shrd cl */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = rm;
        }
        gen_op_mov_TN_reg(ot, 1, reg);
        if (shift) {
            val = ldub_code(s->pc++);
            tcg_gen_movi_tl(cpu_T3, val);
        } else {
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
        }
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
        break;
        /************************/
        /* floats */
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    }

                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        tcg_gen_helper_0_0(helper_fpop);
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    }
                    break;
                case 1:
                    /* XXX: the corresponding CPUID bit must be tested ! */
                    switch(op >> 4) {
                    case 1:
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    switch(op >> 4) {
                    case 0:
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 1:
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    if ((op & 7) == 3)
                        tcg_gen_helper_0_0(helper_fpop);
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fldenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0d: /* fldcw mem */
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
                break;
            case 0x0e: /* fnstenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fstenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0f: /* fnstcw mem */
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x1d: /* fldt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
                break;
            case 0x1f: /* fstpt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* frstor mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_frstor,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2e: /* fnsave mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fsave,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2f: /* fnstsw mem */
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x3c: /* fbld */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
                break;
            case 0x3e: /* fbstp */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3d: /* fildll */
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
                break;
            case 0x3f: /* fistpll */
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_0(helper_fpop);
                break;
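            /* In this whole FPU block, 'op' packs the low three bits of the
               0xd8..0xdf escape opcode with the modrm reg field
               (op = ((b & 7) << 3) | reg), so each case value above and
               below identifies one ESC instruction form. */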
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops */
            opreg = rm;
            switch(op) {
            case 0x08: /* fld sti */
                tcg_gen_helper_0_0(helper_fpush);
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    tcg_gen_helper_0_0(helper_fwait);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    tcg_gen_helper_0_0(helper_fchs_ST0);
                    break;
                case 1: /* fabs */
                    tcg_gen_helper_0_0(helper_fabs_ST0);
                    break;
                case 4: /* ftst */
                    tcg_gen_helper_0_0(helper_fldz_FT0);
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    break;
                case 5: /* fxam */
                    tcg_gen_helper_0_0(helper_fxam_ST0);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                switch(rm) {
                case 0: /* fld1 */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fld1_ST0);
                    break;
                case 1: /* fldl2t */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldl2t_ST0);
                    break;
                case 2: /* fldl2e */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldl2e_ST0);
                    break;
                case 3: /* fldpi */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldpi_ST0);
                    break;
                case 4: /* fldlg2 */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldlg2_ST0);
                    break;
                case 5: /* fldln2 */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldln2_ST0);
                    break;
                case 6: /* fldz */
                    tcg_gen_helper_0_0(helper_fpush);
                    tcg_gen_helper_0_0(helper_fldz_ST0);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    tcg_gen_helper_0_0(helper_f2xm1);
                    break;
                case 1: /* fyl2x */
                    tcg_gen_helper_0_0(helper_fyl2x);
                    break;
                case 2: /* fptan */
                    tcg_gen_helper_0_0(helper_fptan);
                    break;
                case 3: /* fpatan */
                    tcg_gen_helper_0_0(helper_fpatan);
                    break;
                case 4: /* fxtract */
                    tcg_gen_helper_0_0(helper_fxtract);
                    break;
                case 5: /* fprem1 */
                    tcg_gen_helper_0_0(helper_fprem1);
                    break;
                case 6: /* fdecstp */
                    tcg_gen_helper_0_0(helper_fdecstp);
                    break;
                default:
                case 7: /* fincstp */
                    tcg_gen_helper_0_0(helper_fincstp);
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    tcg_gen_helper_0_0(helper_fprem);
                    break;
                case 1: /* fyl2xp1 */
                    tcg_gen_helper_0_0(helper_fyl2xp1);
                    break;
                case 2: /* fsqrt */
                    tcg_gen_helper_0_0(helper_fsqrt);
                    break;
                case 3: /* fsincos */
                    tcg_gen_helper_0_0(helper_fsincos);
                    break;
                case 5: /* fscale */
                    tcg_gen_helper_0_0(helper_fscale);
                    break;
                case 4: /* frndint */
                    tcg_gen_helper_0_0(helper_frndint);
                    break;
                case 6: /* fsin */
                    tcg_gen_helper_0_0(helper_fsin);
                    break;
                default:
                case 7: /* fcos */
                    tcg_gen_helper_0_0(helper_fcos);
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;
                    op1 = op & 7;
                    if (op >= 0x20) {
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
                        if (op >= 0x30)
                            tcg_gen_helper_0_0(helper_fpop);
                    } else {
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    tcg_gen_helper_0_0(helper_fclex);
                    break;
                case 3: /* fninit */
                    tcg_gen_helper_0_0(helper_fninit);
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                break;
            case 0x2a: /* fst sti */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* fucom st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                break;
            case 0x2d: /* fucomp st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1, l1;
                    static const uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
                    l1 = gen_new_label();
                    gen_jcc1(s, s->cc_op, op1, l1);
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
                    gen_set_label(l1);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
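        /* fcmovcc is implemented as a conditional branch around the register
           move: gen_jcc1() jumps to l1 when the condition is false, otherwise
           ST(i) is copied into ST0 by helper_fmov_ST0_STN. */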
        /************************/
        /* string ops */
    case 0xa4: /* movsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;
    case 0xaa: /* stosS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0xa6: /* cmpsS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
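        /* All repz/repnz string helpers receive the instruction start and
           end offsets (relative to the code segment base) so the generated
           loop can restart the same instruction while (E)CX counts down and
           still resume correctly after an interrupt or at a block end; the
           final integer argument of scas/cmps selects repnz (1) vs repz (0). */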
    case 0x6c: /* insS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
            if (use_icount)
                gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0x6e: /* outsS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
            if (use_icount)
                gen_jmp(s, s->pc - s->cs_base);
        }
        break;
        /************************/
        /* port I/O */
    case 0xe4: /* in AL, Ib */
    case 0xe5: /* in eAX, Ib */
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        if (use_icount)
            gen_jmp(s, s->pc - s->cs_base);
        break;
    case 0xe6: /* out Ib, AL */
    case 0xe7: /* out Ib, eAX */
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        if (use_icount)
            gen_jmp(s, s->pc - s->cs_base);
        break;
    case 0xec: /* in AL, DX */
    case 0xed: /* in eAX, DX */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        if (use_icount)
            gen_jmp(s, s->pc - s->cs_base);
        break;
    case 0xee: /* out DX, AL */
    case 0xef: /* out DX, eAX */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        if (use_icount)
            gen_jmp(s, s->pc - s->cs_base);
        break;
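        /* gen_check_io() validates the access against the TSS I/O permission
           bitmap and raises the SVM IOIO intercept when required; in the
           flags passed above, SVM_IOIO_TYPE_MASK marks the access as a read
           (IN) while its absence marks a write (OUT), and svm_is_rep()
           encodes a REP prefix into the intercept information. */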
        /************************/
        /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        break;
    case 0xc3: /* ret */
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
        } else {
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(R_CS);
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
        if (!s->pe) {
            /* real mode */
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        break;
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        gen_jcc(s, b, tval, next_eip);
        break;
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        {
            int l1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            }
#ifdef TARGET_X86_64
            if (ot == OT_LONG) {
                /* XXX: specific Intel behaviour ? */
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
                gen_set_label(l1);
                tcg_gen_movi_tl(cpu_tmp0, 0);
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
            } else
#endif
            {
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
                gen_op_mov_reg_v(ot, reg, t0);
            }
        }
        break;
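        /* cmov always evaluates the source operand (including a possible
           memory load into t0) and only the register writeback is skipped
           by the branch, matching the documented x86 behaviour that the
           memory access happens regardless of the condition. */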
        /************************/
        /* flags */
    case 0x9c: /* pushf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
        }
        break;
    case 0x9d: /* popf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cpl == 0) {
                if (s->dflag) {
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
                } else {
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
                    } else {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
                    }
                } else {
                    if (s->dflag) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
                    } else {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
                    }
                }
            }
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_T[0]);
        /* Note: gen_compute_eflags() only gives the condition codes */
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        goto bt_op;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_exts(ot, cpu_T[1]);
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
    bt_op:
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
        switch(op) {
        case 0: /* bt */
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            break;
        case 1: /* bts */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        case 2: /* btr */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        default:
        case 3: /* btc */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        }
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
        }
        break;
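        /* After bt/bts/btr/btc, cpu_cc_src holds the value shifted right by
           the bit index and cc_op is set to CC_OP_SARB + ot, so CF is later
           recovered from bit 0 of cc_src; cpu_tmp4 preserves that shifted
           value across the read-modify-write of the destination. */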
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        {
            int label1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            gen_extu(ot, cpu_T[0]);
            label1 = gen_new_label();
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            tcg_gen_mov_tl(t0, cpu_T[0]);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
            if (b & 1) {
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
            } else {
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
            }
            gen_op_mov_reg_T0(ot, reg);
            tcg_gen_movi_tl(cpu_cc_dst, 1);
            gen_set_label(label1);
            tcg_gen_discard_tl(cpu_cc_src);
            s->cc_op = CC_OP_LOGICB + ot;
        }
        break;
        /************************/
        /* BCD */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_daa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_das);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aaa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aas);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
        break;
        /************************/
        /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
        }
        break;
    case 0xcc: /* int3 */
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                tcg_gen_helper_0_0(helper_sti);
                /* interrupts are enabled only after the insn following sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        if (ot == OT_WORD)
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
        else
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else {
            TCGv tmp0;
            gen_op_mov_TN_reg(OT_LONG, 0, reg);

            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        {
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#endif
        break;
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags_c(cpu_T[0]);
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2, l3;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;

            l1 = gen_new_label();
            l2 = gen_new_label();
            l3 = gen_new_label();
            b &= 3;
            switch(b) {
            case 0: /* loopnz */
            case 1: /* loopz */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jz_ecx(s->aflag, l3);
                gen_compute_eflags(cpu_tmp0);
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                if (b == 0) {
                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
                } else {
                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
                }
                break;
            case 2: /* loop */
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jnz_ecx(s->aflag, l1);
                break;
            default:
            case 3: /* jecxz */
                gen_op_jz_ecx(s->aflag, l1);
                break;
            }

            gen_set_label(l3);
            gen_jmp_im(next_eip);
            tcg_gen_br(l2);

            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
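        /* loop/loopz/loopnz/jecxz use three labels: l1 is the branch-taken
           target (EIP set to tval), l3 is the fall-through path used when
           (E)CX hits zero before the flag test, and l2 joins both paths so
           a single gen_eob() ends the block after either gen_jmp_im(). */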
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            if (b & 2) {
                tcg_gen_helper_0_0(helper_rdmsr);
            } else {
                tcg_gen_helper_0_0(helper_wrmsr);
            }
        }
        break;
    case 0x131: /* rdtsc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        if (use_icount)
            gen_io_start();
        tcg_gen_helper_0_0(helper_rdtsc);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0x133: /* rdpmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdpmc);
        break;
    case 0x134: /* sysenter */
        /* For Intel, SYSENTER is valid on 64-bit */
        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysenter);
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        /* For Intel, SYSEXIT is valid on 64-bit */
        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_cpuid);
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
            s->is_jmp = 3;
        }
        break;
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
            else
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
        case 1:
            if (mod == 3) {
                switch (rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EAX);
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EAX);
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    gen_jmp_im(pc_start - s->cs_base);
                    tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                switch(rm) {
                case 0: /* VMRUN */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_2(helper_vmrun,
                                           tcg_const_i32(s->aflag),
                                           tcg_const_i32(s->pc - pc_start));
                    }
                    break;
                case 1: /* VMMCALL */
                    if (!(s->flags & HF_SVME_MASK))
                        goto illegal_op;
                    tcg_gen_helper_0_0(helper_vmmcall);
                    break;
                case 2: /* VMLOAD */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_vmload,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                case 3: /* VMSAVE */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_vmsave,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                case 4: /* STGI */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_0(helper_stgi);
                    }
                    break;
                case 5: /* CLGI */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_0(helper_clgi);
                    }
                    break;
                case 6: /* SKINIT */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    tcg_gen_helper_0_0(helper_skinit);
                    break;
                case 7: /* INVLPGA */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_invlpga,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start,
                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
                } else {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            int label1;
            TCGv t0, t1, t2;

            if (!s->pe || s->vm86)
                goto illegal_op;
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
            ot = OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_v_reg(ot, t0, rm);
            }
            gen_op_mov_v_reg(ot, t1, reg);
            tcg_gen_andi_tl(cpu_tmp0, t0, 3);
            tcg_gen_andi_tl(t1, t1, 3);
            tcg_gen_movi_tl(t2, 0);
            label1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
            tcg_gen_andi_tl(t0, t0, ~3);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_movi_tl(t2, CC_Z);
            gen_set_label(label1);
            if (mod != 3) {
                gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_reg_v(ot, rm, t0);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_compute_eflags(cpu_cc_src);
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
            s->cc_op = CC_OP_EFLAGS;
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        {
            int label1;
            TCGv t0;

            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (b == 0x102)
                tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
            else
                tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
            label1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
            gen_op_mov_reg_v(ot, reg, t0);
            gen_set_label(label1);
            s->cc_op = CC_OP_EFLAGS;
        }
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            if (b & 2) {
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_write_crN,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                tcg_gen_helper_1_1(helper_read_crN,
                                   cpu_T[0], tcg_const_i32(reg));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x1b8: /* SSE4.2 popcnt */
        if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
             PREFIX_REPZ)
            goto illegal_op;
        if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
            goto illegal_op;

        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7);

        if (s->prefix & PREFIX_DATA)
            ot = OT_WORD;
        else if (s->dflag != 2)
            ot = OT_LONG;
        else
            ot = OT_QUAD;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        tcg_gen_helper_1_2(helper_popcnt,
                           cpu_T[0], cpu_T[0], tcg_const_i32(ot));
        gen_op_mov_reg_T0(ot, reg);

        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x138 ... 0x13a:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
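/* A sketch of what the DEF_HELPER trick above expands to, assuming helper.h
   lists entries of the form DEF_HELPER(void, helper_rdtsc, (void)): each
   entry becomes
       tcg_register_helper(helper_rdtsc, "helper_rdtsc");
   so the same table that declares the helper prototypes elsewhere also
   registers their function pointers and names with the TCG runtime here. */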
7527 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7528 basic block 'tb'. If search_pc is TRUE, also generate PC
7529 information for each intermediate instruction. */
7530 static inline void gen_intermediate_code_internal(CPUState
*env
,
7531 TranslationBlock
*tb
,
7534 DisasContext dc1
, *dc
= &dc1
;
7535 target_ulong pc_ptr
;
7536 uint16_t *gen_opc_end
;
7539 target_ulong pc_start
;
7540 target_ulong cs_base
;
7544 /* generate intermediate code */
7546 cs_base
= tb
->cs_base
;
7548 cflags
= tb
->cflags
;
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
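    /* mem_index is non-zero only with the soft MMU: CPL 3 selects the
       user-mode access functions (2 * 4), kernel mode the privileged
       ones (1 * 4); 0 means direct host memory access. */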
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
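    /* direct block chaining must stay off whenever control has to return
       to the main loop after every instruction: trap flag, hardware
       single step, or a pending IRQ-inhibit window. */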
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
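    /* with icount enabled, CF_COUNT_MASK in tb->cflags bounds how many
       guest instructions this block may contain; zero means no explicit
       budget was requested. */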

    gen_icount_start();
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
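        /* when search_pc is set, record for each generated micro-op the
           guest PC, the pending cc_op and an instruction-start flag, so
           that a host fault address can later be mapped back to a guest
           EIP by gen_pc_load(). */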
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* stop generation, too, if the translation grows too long */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
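            /* stopping TARGET_PAGE_SIZE - 32 bytes before the page end
               leaves room for the longest x86 instruction (15 bytes), so
               the decoder presumably never runs far into the next page */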
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
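
/* gen_intermediate_code() is the normal translation entry point;
   gen_intermediate_code_pc() re-translates a block solely to rebuild the
   gen_opc_* side tables that gen_pc_load() below consults. */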

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}
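
/* A minimal sketch of the expected restore flow (hedged: the lookup step
   is illustrative, the real caller is cpu_restore_state()):

       gen_intermediate_code_pc(env, tb);   // rebuild the gen_opc_* tables
       pc_pos = opc_index_for(searched_pc); // hypothetical lookup helper
       gen_pc_load(env, tb, searched_pc, pc_pos, puc);

   after which env->eip and env->cc_op describe the guest instruction that
   contains searched_pc. */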