/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
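/*
 * Example (illustrative): on TARGET_X86_64,
 *     if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs))
 * expands to
 *     if (reg < 4 || reg >= 8 || x86_64_hregs)
 * while on 32-bit targets the X86_64_DEF(...) part expands to nothing,
 * leaving just "if (reg < 4)".
 */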
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
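/* A DisasContext is filled in once per translation block and threaded
   through every gen_* helper below while that block is being decoded. */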
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR,
};

/* I386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
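/*
 * Example (illustrative): DEF_REGS(gen_op_cmovw_, _T1_T0) expands to the
 * comma-separated list gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0, ...
 * i.e. one table entry per CPU register.
 */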
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
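/*
 * These offsets locate the 8/16/32 bit views of a guest register inside its
 * target_ulong slot. On a little-endian host AL lives at offset 0 and AH at
 * offset 1; on a big-endian 64-bit host the same bytes sit at offsets 7 and
 * 6, which is what the sizeof(target_ulong)-N forms select.
 */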
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
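/*
 * Note on the OT_BYTE case: without a REX prefix, register numbers 4-7
 * encode AH/CH/DH/BH, i.e. bits 8-15 of regs[reg - 4], hence the
 * REG_H_OFFSET store; with REX (x86_64_hregs) every number encodes a
 * low byte (SPL/BPL/SIL/DIL and r8b-r15b).
 */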
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
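/*
 * Condition codes are evaluated lazily: instead of computing EFLAGS after
 * every instruction, the translator records a CC_OP_* tag (plus the operand
 * values) and only materializes the flags when something actually reads
 * them, or when cc_op must be made CC_OP_DYNAMIC at a block boundary.
 */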
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
                   gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
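/*
 * The idx argument packs two fields: the low two bits select the operand
 * size (OT_BYTE..OT_QUAD) and the remaining bits carry s->mem_index, so
 * callers pass "ot + s->mem_index" and the helpers split it back apart.
 */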
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
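/*
 * The Dshift helpers load +/-1, +/-2, +/-4 or +/-8 into T0 according to the
 * direction flag, so the string helpers below can advance ESI/EDI by simply
 * adding T0 after each element.
 */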
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* dummy store to make the insn restartable on a page fault */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
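/*
 * Each REP iteration is translated as its own basic block: the body runs
 * once, ECX is decremented, and control jumps back to cur_eip so the
 * instruction is re-entered. This keeps interrupts and single-stepping
 * precise at every element, at the cost of one block exit per iteration.
 */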
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
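/*
 * Example (illustrative): for "addl %ebx, (%esi)" the decoder computes the
 * address into A0, loads T1 from EBX, and calls gen_op(s, OP_ADDL, OT_LONG,
 * OR_TMP0): T0 is loaded from memory, added to T1, stored back, and only
 * then are the cc operands updated, so a faulting store leaves EFLAGS intact.
 */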
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update_inc_cc();
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib, base, index, scale, opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0)
                    gen_op_addq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
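/*
 * Example (illustrative): for the 32-bit insn "mov 0x10(%eax,%ebx,4),%ecx"
 * (bytes 8b 4c 98 10) the modrm byte 0x4c gives mod=1, reg=ECX, rm=4, so a
 * SIB byte follows; SIB 0x98 gives scale=2, index=EBX, base=EAX. The code
 * above therefore emits A0 = EAX + (EBX << 2) + 0x10, plus the DS base when
 * addseg is set.
 */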
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;
        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }
        switch (mod) {
        case 0:
            if (base == 5)
                s->pc += 4;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6)
                s->pc += 2;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;

    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
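/*
 * Direct block chaining: tcg_gen_goto_tb emits a patchable jump slot, and
 * encoding the return value as (long)tb + tb_num lets the execution loop
 * identify which of the two exits of this TB should later be linked to its
 * successor block.
 */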
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB: case CC_OP_ADDW: case CC_OP_ADDL: case CC_OP_ADDQ:
        case CC_OP_ADCB: case CC_OP_ADCW: case CC_OP_ADCL: case CC_OP_ADCQ:
        case CC_OP_SBBB: case CC_OP_SBBW: case CC_OP_SBBL: case CC_OP_SBBQ:
        case CC_OP_LOGICB: case CC_OP_LOGICW: case CC_OP_LOGICL: case CC_OP_LOGICQ:
        case CC_OP_INCB: case CC_OP_INCW: case CC_OP_INCL: case CC_OP_INCQ:
        case CC_OP_DECB: case CC_OP_DECW: case CC_OP_DECL: case CC_OP_DECQ:
        case CC_OP_SHLB: case CC_OP_SHLW: case CC_OP_SHLL: case CC_OP_SHLQ:
        case CC_OP_SARB: case CC_OP_SARW: case CC_OP_SARL: case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB: case CC_OP_ADDW: case CC_OP_ADDL: case CC_OP_ADDQ:
    case CC_OP_LOGICB: case CC_OP_LOGICW: case CC_OP_LOGICL: case CC_OP_LOGICQ:
    case CC_OP_INCB: case CC_OP_INCW: case CC_OP_INCL: case CC_OP_INCQ:
    case CC_OP_DECB: case CC_OP_DECW: case CC_OP_DECL: case CC_OP_DECQ:
    case CC_OP_SHLB: case CC_OP_SHLW: case CC_OP_SHLL: case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)

static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if (s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now */
    }
#endif
    return 0;
}
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if (!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if (s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if (s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
            break;
    }
    return 0;
}

static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
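/*
 * The T1 detour above exists because with addseg the segment base is folded
 * into A0 before the store; the unbiased stack pointer value therefore has
 * to be kept in T1 so ESP can be written back without the SS base.
 */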
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;

    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;

    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
#define SSE_SPECIAL ((GenOpFunc2 *)1)
#define SSE_DUMMY ((GenOpFunc2 *)2)

#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
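/*
 * Example (illustrative): MMX_OP2(paddb) expands to the pair
 * { gen_op_paddb_mmx, gen_op_paddb_xmm }, i.e. one handler for the MMX
 * form and one for the 66-prefixed SSE form of the same opcode.
 */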
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
#define SSE_CMP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd }
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_CMP(cmpeq),
    SSE_CMP(cmplt),
    SSE_CMP(cmple),
    SSE_CMP(cmpunord),
    SSE_CMP(cmpneq),
    SSE_CMP(cmpnlt),
    SSE_CMP(cmpnle),
    SSE_CMP(cmpord),
};
#undef SSE_CMP
static GenOpFunc2 *sse_op_table5[256] = {
    [0x0c] = gen_op_pi2fw,
    [0x0d] = gen_op_pi2fd,
    [0x1c] = gen_op_pf2iw,
    [0x1d] = gen_op_pf2id,
    [0x8a] = gen_op_pfnacc,
    [0x8e] = gen_op_pfpnacc,
    [0x90] = gen_op_pfcmpge,
    [0x94] = gen_op_pfmin,
    [0x96] = gen_op_pfrcp,
    [0x97] = gen_op_pfrsqrt,
    [0x9a] = gen_op_pfsub,
    [0x9e] = gen_op_pfadd,
    [0xa0] = gen_op_pfcmpgt,
    [0xa4] = gen_op_pfmax,
    [0xa6] = gen_op_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_op_movq, /* pfrsqit1 */
    [0xaa] = gen_op_pfsubr,
    [0xae] = gen_op_pfacc,
    [0xb0] = gen_op_pfcmpeq,
    [0xb4] = gen_op_pfmul,
    [0xb6] = gen_op_movq, /* pfrcpit2 */
    [0xb7] = gen_op_pmulhrw_mmx,
    [0xbb] = gen_op_pswapd,
    [0xbf] = gen_op_pavgb_mmx /* pavgusb */
};
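/*
 * Dispatch note: the b1 column index computed below in gen_sse encodes the
 * opcode's mandatory prefix: 0 = none (MMX/ps forms), 1 = 0x66 (pd),
 * 2 = 0xF3 (ss), 3 = 0xF2 (sd).
 */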
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
        goto illegal_op;
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntps */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
2839 case 0x7e: /* movd ea, mm */
2840 #ifdef TARGET_X86_64
2841 if (s
->dflag
== 2) {
2842 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2843 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
2847 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2848 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
    case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
        if (s->dflag == 2) {
            gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
        gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
        gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
    case 0x27e: /* movq xmm, ea */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        rm = (modrm & 7) | REX_B(s);
        gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                    offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
    case 0x7f: /* movq ea, mm */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
        gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                    offsetof(CPUX86State,fpregs[reg].mmx));
    case 0x011: /* movups */
    case 0x111: /* movupd */
    case 0x029: /* movaps */
    case 0x129: /* movapd */
    case 0x17f: /* movdqa ea, xmm */
    case 0x27f: /* movdqu ea, xmm */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
        rm = (modrm & 7) | REX_B(s);
        gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                    offsetof(CPUX86State,xmm_regs[reg]));
    case 0x211: /* movss ea, xmm */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
        gen_op_st_T0_A0(OT_LONG + s->mem_index);
        rm = (modrm & 7) | REX_B(s);
        gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
    case 0x311: /* movsd ea, xmm */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        rm = (modrm & 7) | REX_B(s);
        gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
    case 0x013: /* movlps */
    case 0x113: /* movlpd */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
    case 0x017: /* movhps */
    case 0x117: /* movhpd */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
    case 0x71: /* shift mm, im */
    case 0x171: /* shift xmm, im */
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
        op1_offset = offsetof(CPUX86State,xmm_t0);
        gen_op_movl_T0_im(val);
        gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
        gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
        op1_offset = offsetof(CPUX86State,mmx_t0);
        sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
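        /* sse_op_table2 row selection: opcodes 0x71/0x72/0x73 pick the
           8-entry group via (b - 1) & 3, the ModRM /reg field picks the
           shift kind within the group, and b1 selects the MMX or SSE
           flavour of the generated op. */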
        rm = (modrm & 7) | REX_B(s);
        op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
        sse_op2(op2_offset, op1_offset);
    case 0x050: /* movmskps */
        rm = (modrm & 7) | REX_B(s);
        gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
        gen_op_mov_reg_T0(OT_LONG, reg);
    case 0x150: /* movmskpd */
        rm = (modrm & 7) | REX_B(s);
        gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
        gen_op_mov_reg_T0(OT_LONG, reg);
    case 0x02a: /* cvtpi2ps */
    case 0x12a: /* cvtpi2pd */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        op2_offset = offsetof(CPUX86State,mmx_t0);
        gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
        op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
        op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
        gen_op_cvtpi2ps(op1_offset, op2_offset);
        gen_op_cvtpi2pd(op1_offset, op2_offset);
    case 0x22a: /* cvtsi2ss */
    case 0x32a: /* cvtsi2sd */
        ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
        sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
    case 0x02c: /* cvttps2pi */
    case 0x12c: /* cvttpd2pi */
    case 0x02d: /* cvtps2pi */
    case 0x12d: /* cvtpd2pi */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        op2_offset = offsetof(CPUX86State,xmm_t0);
        gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
        rm = (modrm & 7) | REX_B(s);
        op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
        gen_op_cvttps2pi(op1_offset, op2_offset);
        gen_op_cvttpd2pi(op1_offset, op2_offset);
        gen_op_cvtps2pi(op1_offset, op2_offset);
        gen_op_cvtpd2pi(op1_offset, op2_offset);
    case 0x22c: /* cvttss2si */
    case 0x32c: /* cvttsd2si */
    case 0x22d: /* cvtss2si */
    case 0x32d: /* cvtsd2si */
        ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
        op2_offset = offsetof(CPUX86State,xmm_t0);
        rm = (modrm & 7) | REX_B(s);
        op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                      (b & 1) * 4](op2_offset);
        gen_op_mov_reg_T0(ot, reg);
    case 0xc4: /* pinsrw */
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        val = ldub_code(s->pc++);
        gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
        gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
    case 0xc5: /* pextrw */
        val = ldub_code(s->pc++);
        rm = (modrm & 7) | REX_B(s);
        gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
        gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_op_mov_reg_T0(OT_LONG, reg);
    case 0x1d6: /* movq ea, xmm */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        rm = (modrm & 7) | REX_B(s);
        gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                    offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
        gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
    case 0x2d6: /* movq2dq */
        gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                    offsetof(CPUX86State,fpregs[rm].mmx));
        gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
    case 0x3d6: /* movdq2q */
        rm = (modrm & 7) | REX_B(s);
        gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                    offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
    case 0xd7: /* pmovmskb */
        rm = (modrm & 7) | REX_B(s);
        gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
        gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_op_mov_reg_T0(OT_LONG, reg);
        /* generic MMX or SSE operation */
        /* maskmov : we must prepare A0 */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EDI);
            gen_op_movl_A0_reg(R_EDI);
            gen_op_andl_A0_ffff();
        gen_add_A0_ds_seg(s);
    case 0x70: /* pshufx insn */
    case 0xc6: /* pshufx insn */
    case 0xc2: /* compare insns */
        op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        op2_offset = offsetof(CPUX86State,xmm_t0);
        if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                        b == 0xc2)) {
            /* specific case for SSE single instructions */
            gen_op_ld_T0_A0(OT_LONG + s->mem_index);
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
            gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
            gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
        rm = (modrm & 7) | REX_B(s);
        op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
        op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        op2_offset = offsetof(CPUX86State,mmx_t0);
        gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
        op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
    case 0x0f: /* 3DNow! data insns */
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
        val = ldub_code(s->pc++);
        sse_op2 = sse_op_table5[val];
        sse_op2(op1_offset, op2_offset);
    case 0x70: /* pshufx insn */
    case 0xc6: /* pshufx insn */
        val = ldub_code(s->pc++);
        sse_op3 = (GenOpFunc3 *)sse_op2;
        sse_op3(op1_offset, op2_offset, val);
        val = ldub_code(s->pc++);
        sse_op2 = sse_op_table4[val][b1];
        sse_op2(op1_offset, op2_offset);
        sse_op2(op1_offset, op2_offset);
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
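        /* 0x2e/0x2f are ucomis/comis: they write EFLAGS directly, so the
           lazy flags state is marked as already materialized. */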
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
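    /* decode pipeline: consume any prefixes (including REX in 64-bit
       mode), fetch the opcode byte(s), then decode ModRM/SIB and
       immediates as each case below requires */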
#ifdef TARGET_X86_64
    s->rip_offset = 0; /* for relative ip address */
    b = ldub_code(s->pc);
    /* check prefixes */
#ifdef TARGET_X86_64
    prefixes |= PREFIX_REPZ;
    prefixes |= PREFIX_REPNZ;
    prefixes |= PREFIX_LOCK;
    prefixes |= PREFIX_DATA;
    prefixes |= PREFIX_ADR;
    rex_w = (b >> 3) & 1;
    rex_r = (b & 0x4) << 1;
    s->rex_x = (b & 0x2) << 2;
    REX_B(s) = (b & 0x1) << 3;
    x86_64_hregs = 1; /* select uniform byte register addressing */
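    /* a REX prefix is 0100WRXB: W selects 64-bit operand size while R, X
       and B supply bit 3 of the ModRM reg field, the SIB index and the
       ModRM rm/SIB base respectively, hence the shifts into bit 3 above */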
    /* 0x66 is ignored if rex.w is set */
    if (prefixes & PREFIX_DATA)
    if (!(prefixes & PREFIX_ADR))
    prefixes |= PREFIX_REPZ;
    prefixes |= PREFIX_REPNZ;
    prefixes |= PREFIX_LOCK;
    prefixes |= PREFIX_DATA;
    prefixes |= PREFIX_ADR;
    if (prefixes & PREFIX_DATA)
    if (prefixes & PREFIX_ADR)
    s->prefix = prefixes;
    /* lock generation */
    if (prefixes & PREFIX_LOCK)
    /* now check op code */
    /**************************/
    /* extended op code */
    b = ldub_code(s->pc++) | 0x100;
    /**************************/
        ot = dflag + OT_WORD;
        case 0: /* OP Ev, Gv */
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
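            /* ModRM layout: mod in bits 7..6, reg in 5..3, rm in 2..0,
               with rex_r/REX_B(s) extending reg and rm to 4 bits in 64-bit
               mode (e.g. modrm 0xd8 decodes to mod=3, reg=3, rm=0) */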
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            } else if (op == OP_XORL && rm == reg) {
                /* xor reg, reg optimisation */
                s->cc_op = CC_OP_LOGICB + ot;
                gen_op_mov_reg_T0(ot, reg);
                gen_op_update1_cc();
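                /* xor reg,reg yields zero whatever the old value was, so
                   the operand load is skipped and only the zero result plus
                   the matching logic-op flags state is emitted */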
            gen_op_mov_TN_reg(ot, 1, reg);
            gen_op(s, op, ot, opreg);
        case 1: /* OP Gv, Ev */
            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            reg = ((modrm >> 3) & 7) | rex_r;
            rm = (modrm & 7) | REX_B(s);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(ot + s->mem_index);
            } else if (op == OP_XORL && rm == reg) {
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op(s, op, ot, reg);
        case 2: /* OP A, Iv */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, OR_EAX);
    case 0x80: /* GRP1 */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        val = insn_get(s, ot);
        val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T1_im(val);
        gen_op(s, op, ot, opreg);
    /**************************/
    /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
    case 0xf6: /* GRP3 */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, rm);
        val = insn_get(s, ot);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_update_neg_cc();
        s->cc_op = CC_OP_SUBB + ot;
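        /* flags are evaluated lazily: cc_op records which operation (and
           at which width, the "+ ot" encoding) last set them, and EFLAGS
           is only computed when something actually reads it */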
        gen_op_mulb_AL_T0();
        s->cc_op = CC_OP_MULB;
        gen_op_mulw_AX_T0();
        s->cc_op = CC_OP_MULW;
        gen_op_mull_EAX_T0();
        s->cc_op = CC_OP_MULL;
#ifdef TARGET_X86_64
        gen_op_mulq_EAX_T0();
        s->cc_op = CC_OP_MULQ;
        gen_op_imulb_AL_T0();
        s->cc_op = CC_OP_MULB;
        gen_op_imulw_AX_T0();
        s->cc_op = CC_OP_MULW;
        gen_op_imull_EAX_T0();
        s->cc_op = CC_OP_MULL;
#ifdef TARGET_X86_64
        gen_op_imulq_EAX_T0();
        s->cc_op = CC_OP_MULQ;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divb_AL_T0();
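        /* division can raise #DE at run time, so EIP is synced to the
           start of the instruction (gen_jmp_im above) before each helper */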
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divw_AX_T0();
        gen_jmp_im(pc_start - s->cs_base);
        /* XXX: this is just a test */
        tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
#ifdef TARGET_X86_64
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divq_EAX_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivb_AL_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivw_AX_T0();
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
#ifdef TARGET_X86_64
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivq_EAX_T0();
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
        if (op == 2 || op == 4) {
            /* operand size for jumps is 64 bit */
        } else if (op == 3 || op == 5) {
            /* for calls, the operand is 16 or 32 bit, even in long mode */
            ot = dflag ? OT_LONG : OT_WORD;
        } else if (op == 6) {
            /* default push size is 64 bit */
            ot = dflag ? OT_QUAD : OT_WORD;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        if (op >= 2 && op != 3 && op != 5)
            gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, rm);
        case 0: /* inc Ev */
            gen_inc(s, ot, opreg, 1);
        case 1: /* dec Ev */
            gen_inc(s, ot, opreg, -1);
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
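            /* far calls in protected mode go through a helper: the
               selector may name a call gate or change privilege level,
               which cannot be checked at translation time */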
        case 4: /* jmp Ev */
            gen_op_andl_T0_ffff();
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
                gen_op_movl_T0_T1();
        case 6: /* push Ev */
    case 0x84: /* test Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
    case 0xa8: /* test eAX, Iv */
        ot = dflag + OT_WORD;
        val = insn_get(s, ot);
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        gen_op_movslq_RAX_EAX();
        gen_op_movswl_EAX_AX();
        gen_op_movsbw_AX_AL();
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        gen_op_movsqo_RDX_RAX();
        gen_op_movslq_EDX_EAX();
        gen_op_movswl_DX_AX();
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        s->rip_offset = insn_const_size(ot);
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        val = insn_get(s, ot);
        gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
            gen_op_mov_TN_reg(ot, 1, reg);
#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
            gen_op_imulw_T0_T1();
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
    case 0x1c1: /* xadd Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_op_mov_TN_reg(ot, 1, rm);
        gen_op_addl_T0_T1();
        gen_op_mov_reg_T1(ot, reg);
        gen_op_mov_reg_T0(ot, rm);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_op_addl_T0_T1();
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T1(ot, reg);
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
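        /* xadd: the destination ends up with old_dest + src (T0 after the
           add) while the source register receives the old destination
           value (T1), i.e. an exchange fused with an add */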
    case 0x1b1: /* cmpxchg Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg(ot, 1, reg);
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, rm);
        gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
        gen_op_mov_reg_T0(ot, rm);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        s->cc_op = CC_OP_SUBB + ot;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
        gen_jmp_im(pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->cc_op = CC_OP_EFLAGS;
    /**************************/
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
    case 0x58 ... 0x5f: /* pop */
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        /* NOTE: order is important for pop %sp */
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
    case 0x60: /* pusha */
    case 0x61: /* popa */
    case 0x68: /* push Iv */
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        val = insn_get(s, ot);
        val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
    case 0x8f: /* pop Ev */
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        /* NOTE: order is important for pop %sp */
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_reg_T0(ot, rm);
        /* NOTE: order is important too for MMU exceptions */
        s->popl_esp_hack = 1 << ot;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        s->popl_esp_hack = 0;
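        /* popl_esp_hack: when the pop destination is a memory operand
           addressed through ESP, the effective address must be computed
           with the already incremented stack pointer; the nonzero offset
           set above makes the address computation account for that */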
    case 0xc8: /* enter */
        val = lduw_code(s->pc);
        level = ldub_code(s->pc++);
        gen_enter(s, val, level);
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
        gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        gen_op_mov_reg_T0(ot, R_EBP);
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        gen_op_movl_T0_seg(b >> 3);
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        /* if reg == SS, inhibit interrupts/trace. */
        /* If several instructions disable interrupts, only the
           first does it */
        if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
            gen_op_set_inhibit_irq();
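        /* loading SS opens a one-instruction interrupt shadow so that the
           following write to [e]sp executes before any interrupt is taken */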
        gen_jmp_im(s->pc - s->cs_base);
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_jmp_im(s->pc - s->cs_base);
    /**************************/
    case 0x89: /* mov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
    case 0xc7: /* mov Ev, Iv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
    case 0x8b: /* mov Ev, Gv */
        ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        /* if reg == SS, inhibit interrupts/trace */
        /* If several instructions disable interrupts, only the
           first does it */
        if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
            gen_op_set_inhibit_irq();
        gen_jmp_im(s->pc - s->cs_base);
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_op_movl_T0_seg(reg);
        ot = OT_WORD + dflag;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        /* d_ot is the size of destination */
        d_ot = dflag + OT_WORD;
        /* ot is the size of source */
        ot = (b & 1) + OT_BYTE;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, rm);
        switch(ot | (b & 8)) {
            gen_op_movzbl_T0_T0();
            gen_op_movsbl_T0_T0();
            gen_op_movzwl_T0_T0();
            gen_op_movswl_T0_T0();
        gen_op_mov_reg_T0(d_ot, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_lds_T0_A0(ot + s->mem_index);
        gen_op_ldu_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(d_ot, reg);
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
    case 0xa0: /* mov EAX, Ov */
    case 0xa2: /* mov Ov, EAX */
            target_ulong offset_addr;
            ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                gen_op_movq_A0_im(offset_addr);
                offset_addr = insn_get(s, OT_LONG);
                offset_addr = insn_get(s, OT_WORD);
                gen_op_movl_A0_im(offset_addr);
            gen_add_A0_ds_seg(s);
            gen_op_ld_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T0(ot, R_EAX);
            gen_op_mov_TN_reg(ot, 0, R_EAX);
            gen_op_st_T0_A0(ot + s->mem_index);
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_addq_A0_AL();
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_AL();
            gen_op_andl_A0_ffff();
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        tmp = ldq_code(s->pc);
        reg = (b & 7) | REX_B(s);
        gen_movtl_T0_im(tmp);
        gen_op_mov_reg_T0(OT_QUAD, reg);
        ot = dflag ? OT_LONG : OT_WORD;
        val = insn_get(s, ot);
        reg = (b & 7) | REX_B(s);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(ot, reg);
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
    case 0x87: /* xchg Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_op_mov_TN_reg(ot, 1, rm);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_mov_reg_T1(ot, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_TN_reg(ot, 0, reg);
        /* for xchg, lock is implicit */
        if (!(prefixes & PREFIX_LOCK))
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (!(prefixes & PREFIX_LOCK))
        gen_op_mov_reg_T1(ot, reg);
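        /* x86 defines xchg with a memory operand as atomic even without a
           LOCK prefix, so the bus lock is taken around the load/store pair
           unless the prefix already generated it */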
    case 0xc4: /* les Gv */
    case 0xc5: /* lds Gv */
    case 0x1b2: /* lss Gv */
    case 0x1b4: /* lfs Gv */
    case 0x1b5: /* lgs Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        gen_jmp_im(s->pc - s->cs_base);
    /************************/
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        opreg = (modrm & 7) | REX_B(s);
        gen_shift(s, op, ot, opreg, OR_ECX);
        shift = ldub_code(s->pc++);
        gen_shifti(s, op, ot, opreg, shift);
    case 0x1a4: /* shld imm */
    case 0x1a5: /* shld cl */
    case 0x1ac: /* shrd imm */
    case 0x1ad: /* shrd cl */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, rm);
        gen_op_mov_TN_reg(ot, 1, reg);
        val = ldub_code(s->pc++);
        gen_op_shiftd_T0_T1_im_cc[ot][op](val);
        gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
        if (op == 0 && ot != OT_WORD)
            s->cc_op = CC_OP_SHLB + ot;
            s->cc_op = CC_OP_SARB + ot;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
        gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
        s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
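        /* with a CL count the shift amount is only known at run time, so
           the flags outcome cannot be classified at translation time and
           cc_op falls back to CC_OP_DYNAMIC */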
        gen_op_mov_reg_T0(ot, rm);
    /************************/
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
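        /* the 6-bit FPU op index combines the low 3 bits of the 0xd8-0xdf
           escape opcode with the ModRM /reg field; the memory and register
           switches below dispatch on this combined value */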
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        case 0x00 ... 0x07: /* fxxxs */
        case 0x10 ... 0x17: /* fixxxl */
        case 0x20 ... 0x27: /* fxxxl */
        case 0x30 ... 0x37: /* fixxx */
            gen_op_flds_FT0_A0();
            gen_op_fildl_FT0_A0();
            gen_op_fldl_FT0_A0();
            gen_op_fild_FT0_A0();
            gen_op_fp_arith_ST0_FT0[op1]();
            /* fcomp needs pop */
        case 0x08: /* flds */
        case 0x0a: /* fsts */
        case 0x0b: /* fstps */
        case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
        case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
        case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
            gen_op_flds_ST0_A0();
            gen_op_fildl_ST0_A0();
            gen_op_fldl_ST0_A0();
            gen_op_fild_ST0_A0();
            gen_op_fisttl_ST0_A0();
            gen_op_fisttll_ST0_A0();
            gen_op_fistt_ST0_A0();
            gen_op_fsts_ST0_A0();
            gen_op_fistl_ST0_A0();
            gen_op_fstl_ST0_A0();
            gen_op_fist_ST0_A0();
        case 0x0c: /* fldenv mem */
            gen_op_fldenv_A0(s->dflag);
        case 0x0d: /* fldcw mem */
        case 0x0e: /* fnstenv mem */
            gen_op_fnstenv_A0(s->dflag);
        case 0x0f: /* fnstcw mem */
        case 0x1d: /* fldt mem */
            gen_op_fldt_ST0_A0();
        case 0x1f: /* fstpt mem */
            gen_op_fstt_ST0_A0();
        case 0x2c: /* frstor mem */
            gen_op_frstor_A0(s->dflag);
        case 0x2e: /* fnsave mem */
            gen_op_fnsave_A0(s->dflag);
        case 0x2f: /* fnstsw mem */
        case 0x3c: /* fbld */
            gen_op_fbld_ST0_A0();
        case 0x3e: /* fbstp */
            gen_op_fbst_ST0_A0();
        case 0x3d: /* fildll */
            gen_op_fildll_ST0_A0();
        case 0x3f: /* fistpll */
            gen_op_fistll_ST0_A0();
        /* register float ops */
        case 0x08: /* fld sti */
            gen_op_fmov_ST0_STN((opreg + 1) & 7);
        case 0x09: /* fxchg sti */
        case 0x29: /* fxchg4 sti, undocumented op */
        case 0x39: /* fxchg7 sti, undocumented op */
            gen_op_fxchg_ST0_STN(opreg);
        case 0x0a: /* grp d9/2 */
            /* check exceptions (FreeBSD FPU probe) */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
        case 0x0c: /* grp d9/4 */
            gen_op_fcom_ST0_FT0();
        case 0x0d: /* grp d9/5 */
            gen_op_fldl2t_ST0();
            gen_op_fldl2e_ST0();
            gen_op_fldlg2_ST0();
            gen_op_fldln2_ST0();
        case 0x0e: /* grp d9/6 */
            case 3: /* fpatan */
            case 4: /* fxtract */
            case 5: /* fprem1 */
            case 6: /* fdecstp */
            case 7: /* fincstp */
        case 0x0f: /* grp d9/7 */
            case 1: /* fyl2xp1 */
            case 3: /* fsincos */
            case 5: /* fscale */
            case 4: /* frndint */
        case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
        case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
        case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
            gen_op_fp_arith_STN_ST0[op1](opreg);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fp_arith_ST0_FT0[op1]();
        case 0x02: /* fcom */
        case 0x22: /* fcom2, undocumented op */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcom_ST0_FT0();
        case 0x03: /* fcomp */
        case 0x23: /* fcomp3, undocumented op */
        case 0x32: /* fcomp5, undocumented op */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcom_ST0_FT0();
        case 0x15: /* da/5 */
            case 1: /* fucompp */
                gen_op_fmov_FT0_STN(1);
                gen_op_fucom_ST0_FT0();
            case 0: /* feni (287 only, just do nop here) */
            case 1: /* fdisi (287 only, just do nop here) */
            case 3: /* fninit */
            case 4: /* fsetpm (287 only, just do nop here) */
        case 0x1d: /* fucomi */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x1e: /* fcomi */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x28: /* ffree sti */
            gen_op_ffree_STN(opreg);
        case 0x2a: /* fst sti */
            gen_op_fmov_STN_ST0(opreg);
        case 0x2b: /* fstp sti */
        case 0x0b: /* fstp1 sti, undocumented op */
        case 0x3a: /* fstp8 sti, undocumented op */
        case 0x3b: /* fstp9 sti, undocumented op */
            gen_op_fmov_STN_ST0(opreg);
        case 0x2c: /* fucom st(i) */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucom_ST0_FT0();
        case 0x2d: /* fucomp st(i) */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucom_ST0_FT0();
        case 0x33: /* de/3 */
            case 1: /* fcompp */
                gen_op_fmov_FT0_STN(1);
                gen_op_fcom_ST0_FT0();
        case 0x38: /* ffreep sti, undocumented op */
            gen_op_ffree_STN(opreg);
        case 0x3c: /* df/4 */
            gen_op_fnstsw_EAX();
        case 0x3d: /* fucomip */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x3e: /* fcomip */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x10 ... 0x13: /* fcmovxx */
            static const uint8_t fcmov_cc[8] = {
            op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
            gen_op_fcmov_ST0_STN_T0(opreg);
    /************************/
    case 0xa4: /* movsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xaa: /* stosS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xac: /* lodsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xae: /* scasS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        s->cc_op = CC_OP_SUBB + ot;
    case 0xa6: /* cmpsS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        s->cc_op = CC_OP_SUBB + ot;
    case 0x6c: /* insS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0x6e: /* outsS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        if (gen_svm_check_io(s, pc_start,
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
                             4 | (1 << (7+s->aflag))))
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    /************************/
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
        gen_op_mov_reg_T1(ot, R_EAX);
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
        gen_op_mov_reg_T1(ot, R_EAX);
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             svm_is_rep(prefixes) | (1 << (4+ot))))
        gen_op_mov_TN_reg(ot, 1, R_EAX);
    /************************/
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        if (CODE64(s) && s->dflag)
        gen_stack_update(s, val + (2 << s->dflag));
        gen_op_andl_T0_ffff();
    case 0xc3: /* ret */
        gen_op_andl_T0_ffff();
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_lret_protected(s->dflag, val);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
    case 0xcb: /* lret */
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
        gen_op_iret_real(s->dflag);
        s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_op_iret_real(s->dflag);
            s->cc_op = CC_OP_EFLAGS;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
        s->cc_op = CC_OP_EFLAGS;
    case 0xe8: /* call im */
        tval = (int32_t)insn_get(s, OT_LONG);
        tval = (int16_t)insn_get(s, OT_WORD);
        next_eip = s->pc - s->cs_base;
        gen_movtl_T0_im(next_eip);
    case 0x9a: /* lcall im */
            unsigned int selector, offset;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);
            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
    case 0xe9: /* jmp im */
        tval = (int32_t)insn_get(s, OT_LONG);
        tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
    case 0xea: /* ljmp im */
            unsigned int selector, offset;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);
            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
    case 0x180 ... 0x18f: /* jcc Jv */
        tval = (int32_t)insn_get(s, OT_LONG);
        tval = (int16_t)insn_get(s, OT_WORD);
        next_eip = s->pc - s->cs_base;
        gen_jcc(s, b, tval, next_eip);
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 1, rm);
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
    /************************/
    case 0x9c: /* pushf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_movl_T0_eflags();
    case 0x9d: /* popf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_op_movl_eflags_T0_cpl0();
            gen_op_movw_eflags_T0_cpl0();
            if (s->cpl <= s->iopl) {
                gen_op_movl_eflags_T0_io();
                gen_op_movw_eflags_T0_io();
            gen_op_movl_eflags_T0();
            gen_op_movw_eflags_T0();
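        /* which eflags store is used depends on privilege: at CPL 0 even
           IOPL may be written (_cpl0), at CPL <= IOPL the IF bit may be
           written (_io), otherwise both bits are preserved */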
        s->cc_op = CC_OP_EFLAGS;
        /* abort translation because TF flag may change */
        gen_jmp_im(s->pc - s->cs_base);
    case 0x9e: /* sahf */
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movb_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
    case 0x9f: /* lahf */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movl_T0_eflags();
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xfc: /* cld */
    case 0xfd: /* std */
    /************************/
    /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, rm);
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_update_bt_cc();
    case 0x1a3: /* bt Gv, Ev */
    case 0x1ab: /* bts */
    case 0x1b3: /* btr */
    case 0x1bb: /* btc */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* specific case: we need to add a displacement */
        gen_op_add_bit_A0_T1[ot - OT_WORD]();
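        /* with a register bit index, bt/bts/btr/btc may address memory
           beyond the operand: the helper adds the word offset derived from
           the bit index so A0 points at the word containing the bit */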
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, rm);
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_update_bt_cc();
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        /* NOTE: in order to handle the 0 case, we must load the
           result. It could be optimized with a generated jump */
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
        gen_op_mov_reg_T1(ot, reg);
        s->cc_op = CC_OP_LOGICB + ot;
    /************************/
    case 0x27: /* daa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x2f: /* das */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x37: /* aaa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x3f: /* aas */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xd4: /* aam */
        val = ldub_code(s->pc++);
        gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        s->cc_op = CC_OP_LOGICB;
    case 0xd5: /* aad */
        val = ldub_code(s->pc++);
        s->cc_op = CC_OP_LOGICB;
    /************************/
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
    case 0xcc: /* int3 */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xce: /* into */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_into(s->pc - pc_start);
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
        gen_debug(s, pc_start - s->cs_base);
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
    case 0xfa: /* cli */
        if (s->cpl <= s->iopl) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
    case 0xfb: /* sti */
        if (s->cpl <= s->iopl) {
            /* interruptions are enabled only the first insn after sti */
            /* If several instructions disable interrupts, only the
               first does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            /* give a chance to handle pending irqs */
            gen_jmp_im(s->pc - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
    case 0x62: /* bound */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        gen_op_mov_TN_reg(OT_QUAD, 0, reg);
        tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_QUAD, reg);
        gen_op_mov_TN_reg(OT_LONG, 0, reg);
        tmp0 = tcg_temp_new(TCG_TYPE_I32);
        tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
        tcg_gen_bswap_i32(tmp0, tmp0);
        tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
        gen_op_mov_reg_T0(OT_LONG, reg);
        gen_op_mov_TN_reg(OT_LONG, 0, reg);
        tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_LONG, reg);
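        /* on the 64-bit target cpu_T[0] is an I64, so the 32-bit bswap is
           done in an I32 temporary (truncate, swap, zero-extend), matching
           the zero-extension of 32-bit results in long mode */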
    case 0xd6: /* salc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        tval = (int8_t)insn_get(s, OT_BYTE);
        next_eip = s->pc - s->cs_base;
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jz_ecx[s->aflag](l1);
        gen_op_dec_ECX[s->aflag]();
        gen_op_loop[s->aflag][b](l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
        retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
    case 0x131: /* rdtsc */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
        gen_jmp_im(pc_start - s->cs_base);
    case 0x133: /* rdpmc */
        gen_jmp_im(pc_start - s->cs_base);
    case 0x134: /* sysenter */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
    case 0x135: /* sysexit */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_syscall(s->pc - pc_start);
    case 0x107: /* sysret */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_sysret(s->dflag);
        /* condition codes are modified only in long mode */
        s->cc_op = CC_OP_EFLAGS;
    case 0x1a2: /* cpuid */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
    case 0xf4: /* hlt */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(s->pc - s->cs_base);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        if (!s->pe || s->vm86)
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
        gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        if (!s->pe || s->vm86)
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_jmp_im(pc_start - s->cs_base);
        if (!s->pe || s->vm86)
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
        gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        if (!s->pe || s->vm86)
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_jmp_im(pc_start - s->cs_base);
        if (!s->pe || s->vm86)
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
        gen_op_st_T0_A0(OT_WORD + s->mem_index);
        gen_add_A0_im(s, 2);
        gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
        gen_op_andl_T0_im(0xffffff);
        gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
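        /* a 16-bit operand-size sgdt stores only 24 bits of the base
           (the 0xffffff mask); CODE64(s) widens the store to a quad so
           the full base is written in long mode */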
        case 0: /* monitor */
            if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
            gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EBX);
                gen_op_addq_A0_AL();
                gen_op_movl_A0_reg(R_EBX);
                gen_op_addl_A0_AL();
                gen_op_andl_A0_ffff();
            gen_add_A0_ds_seg(s);
            if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
            gen_jmp_im(s->pc - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
            gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            s->cc_op = CC_OP_EFLAGS;
            case 1: /* VMMCALL */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
                /* FIXME: cause #UD if hflags & SVM */
            case 2: /* VMLOAD */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
            case 3: /* VMSAVE */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
            case 6: /* SKINIT */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
            case 7: /* INVLPGA */
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
        } else if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start,
                op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            gen_op_andl_T0_im(0xffffff);
            gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
            gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
            gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
            gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            gen_jmp_im(s->pc - s->cs_base);
        case 7: /* invlpg */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
#ifdef TARGET_X86_64
            if (CODE64(s) && rm == 0) {
                gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
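                /* this is swapgs: GS.base and the kernelgsbase MSR value
                   are exchanged through T0/T1 without touching any other
                   segment state */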
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_jmp_im(s->pc - s->cs_base);
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        /* d_ot is the size of destination */
        d_ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 0, rm);
        if (d_ot == OT_QUAD)
            gen_op_movslq_T0_T0();
        gen_op_mov_reg_T0(d_ot, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        if (d_ot == OT_QUAD) {
            gen_op_lds_T0_A0(OT_LONG + s->mem_index);
            gen_op_ld_T0_A0(OT_LONG + s->mem_index);
        gen_op_mov_reg_T0(d_ot, reg);
6050 if (!s
->pe
|| s
->vm86
)
6052 ot
= dflag
? OT_LONG
: OT_WORD
;
6053 modrm
= ldub_code(s
->pc
++);
6054 reg
= (modrm
>> 3) & 7;
6055 mod
= (modrm
>> 6) & 3;
6058 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6059 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
6061 gen_op_mov_TN_reg(ot
, 0, rm
);
6063 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6064 gen_op_set_cc_op(s
->cc_op
);
6066 s
->cc_op
= CC_OP_EFLAGS
;
6068 gen_op_st_T0_A0(ot
+ s
->mem_index
);
6070 gen_op_mov_reg_T0(ot
, rm
);
6072 gen_op_arpl_update();
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0: case 2: case 3: case 4: case 8:
                if (b & 2) {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                    gen_op_mov_TN_reg(ot, 0, rm);
                    gen_op_movl_crN_T0(reg);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                    if (reg == 8)
                        gen_op_movtl_T0_cr8();
                    else
#endif
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                gen_op_movl_drN_T0(reg);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_op_clts();
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxsave_A0((s->dflag == 2));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxrstor_A0((s->dflag == 2));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        gen_op_rsm();
        gen_eob(s);
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
        /* fall through */
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
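/* Each entry maps a micro-op index to the bitmask of eflags bits that
   the op consumes; for instance opc_read_flags[INDEX_op_jz_subw] is
   CC_Z because a jz after a word subtraction only needs the Z bit.
   Ops absent from the initializer read no flags (0). */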
/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
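/* opc_simpler maps a flag-setting op to its flag-free twin, e.g.
   INDEX_op_shll_T0_T1_cc -> INDEX_op_shll_T0_T1; optimize_flags()
   below substitutes the simpler form whenever the flags written by
   the op are provably dead. */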
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
void optimize_flags_init(void)
{
    int i;

    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);
}
/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* make sure the last values are filled in */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}