 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
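
/* Illustrative note on the convention used throughout this file (a sketch,
   not part of the original comments): cpu_T[0] and cpu_T[1] act as the
   translator's two pseudo operand registers and cpu_A0 as the pseudo address
   register.  A typical instruction is translated as "load operands into
   cpu_T, compute, write back", e.g. for a memory-destination 32-bit add:

       gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);      // T1 = EAX
       gen_op_ld_T0_A0(OT_LONG + s->mem_index);   // T0 = mem[A0]
       gen_op_addl_T0_T1();                       // T0 += T1
       gen_op_st_T0_A0(OT_LONG + s->mem_index);   // mem[A0] = T0
*/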
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

/* I386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
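
/* For illustration: DEF_REGS(gen_op_cmovw_, _T1_T0) expands to the list
   gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0, ..., one entry per CPU
   register (8 on i386, 16 on x86_64); this is how the per-register
   operation tables below are populated. */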
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
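
/* Illustration (assuming a little endian host): these offsets locate the x86
   sub-registers inside the full target_ulong register slot, e.g. for EAX:
   AL is the byte at REG_B_OFFSET (0), AH the byte at REG_H_OFFSET (1), AX the
   16-bit word at REG_W_OFFSET (0), EAX the 32-bit long at REG_L_OFFSET (0).
   REG_LH_OFFSET addresses the upper 32 bits, which must be zeroed on 32-bit
   writes in long mode. */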
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
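
/* Note on the OT_BYTE test above: without a REX prefix, encodings with
   reg >= 4 address the legacy high-byte registers AH/CH/DH/BH, hence the
   regs[reg - 4] + REG_H_OFFSET access; once a REX prefix is seen
   (x86_64_hregs set), every byte register addresses a low byte instead. */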
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
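
/* Sketch of the lazy flags convention used throughout this file: instead of
   computing EFLAGS after every instruction, the translator records which
   operation last set the flags in cc_op and only materializes them when a
   consumer needs them.  A typical call site looks like:

       if (s->cc_op != CC_OP_DYNAMIC)
           gen_op_set_cc_op(s->cc_op);   // spill the pending cc state

   CC_OP_DYNAMIC means the cc_op field stored in CPUState is already up to
   date and nothing needs to be spilled. */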
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
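
/* Note on the idx encoding used by these load/store helpers: callers pass
   "ot + s->mem_index", so the low two bits select the operand width
   (OT_BYTE..OT_QUAD) and the upper bits carry the memory access index
   (pre-multiplied by 4), which "(idx >> 2) - 1" recovers for the
   tcg_gen_qemu_* softmmu dispatch. */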
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* dummy write first, to be restartable in case of page fault */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
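
/* Sketch of the control flow the GEN_REPZ macros emit, for illustration:

       if (ECX == 0) goto next_insn;       // gen_jz_ecx_string
       <one iteration of the string op>
       ECX--;
       (GEN_REPZ2 only) if (ZF test fails) goto next_insn;
       goto current_insn;                  // re-execute the rep instruction

   Each translated block performs a single iteration and loops by jumping
   back to the rep instruction itself, which keeps single stepping and
   interrupt delivery correct. */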
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update_inc_cc();
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
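
/* ModRM/SIB recap for the decoder below (for reference): a ModRM byte is
   laid out as mod(7:6) reg(5:3) rm(2:0).  In 32/64-bit addressing, rm == 4
   announces a SIB byte, scale(7:6) index(5:3) base(2:0), and mod selects the
   displacement size (none, 8 or 32 bits; mod == 0 with base == 5 means a
   bare disp32, which is RIP-relative in 64-bit code).  REX_X/REX_B extend
   index and base to 16 registers. */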
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0)
                    gen_op_addq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;
        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }
        switch (mod) {
        case 0:
            if (base == 5)
                s->pc += 4;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6)
                s->pc += 2;
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    } else {
        override = R_DS;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
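
/* For illustration: tcg_gen_goto_tb()/tcg_gen_exit_tb((long)tb + tb_num)
   implement direct block chaining.  The low bits of the exit value identify
   which of the two jump slots of this TB should be patched, so once the
   target TB has been translated the jump is rewritten to branch to it
   directly instead of returning to the main execution loop. */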
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op, l1, l2;
    GenOpFunc1 *func;
    target_ulong tmp;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

        /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)

static inline void
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if (s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
           so we know if this is an EOB or not ... let's assume it's not
           for now */
    }
#endif
}
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if (!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
    /* CRx and DRx reads/writes */
    case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        SVM_movq_T1_im(param);
        gen_op_geneflags();
        gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
        /* this is a special case as we do not know if the interception occurs
           so we assume there was none */
        return 0;
    case SVM_EXIT_MSR:
        if (s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        }
        break;
    default:
        if (s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_EFLAGS;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_vmexit(type >> 32, type);
            /* we can optimize this one so TBs don't get longer
               than up to vmexit */
            gen_eob(s);
            return 1;
        }
        break;
    }
    return 0;
}

static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
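
/* Push width sketch: operand size (dflag), stack size (ss32) and segment
   bases (addseg) combine as follows - 64-bit code stores 8 (or 2) bytes at
   RSP; otherwise 2 or 4 bytes are stored at SS:ESP (or SS:SP with the 0xffff
   mask), and ESP/SP is only written back after the store so a faulting push
   leaves the stack pointer unchanged. */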
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2220 static void gen_pusha(DisasContext
*s
)
2223 gen_op_movl_A0_reg(R_ESP
);
2224 gen_op_addl_A0_im(-16 << s
->dflag
);
2226 gen_op_andl_A0_ffff();
2227 gen_op_movl_T1_A0();
2229 gen_op_addl_A0_seg(R_SS
);
2230 for(i
= 0;i
< 8; i
++) {
2231 gen_op_mov_TN_reg(OT_LONG
, 0, 7 - i
);
2232 gen_op_st_T0_A0(OT_WORD
+ s
->dflag
+ s
->mem_index
);
2233 gen_op_addl_A0_im(2 << s
->dflag
);
2235 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
}
#define SSE_SPECIAL ((GenOpFunc2 *)1)
#define SSE_DUMMY ((GenOpFunc2 *)2)

#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
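
/* Table layout note: the second index of sse_op_table1 below is the
   mandatory prefix, as computed in gen_sse(): b1 = 0 for none (MMX/SSE),
   1 for 0x66 (SSE2), 2 for 0xF3 and 3 for 0xF2.  SSE_SPECIAL marks opcodes
   decoded by hand in gen_sse(), SSE_DUMMY marks ops handled before the
   table dispatch (femms/emms). */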
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
*sse_op_table2
[3 * 8][2] = {
2547 [0 + 2] = MMX_OP2(psrlw
),
2548 [0 + 4] = MMX_OP2(psraw
),
2549 [0 + 6] = MMX_OP2(psllw
),
2550 [8 + 2] = MMX_OP2(psrld
),
2551 [8 + 4] = MMX_OP2(psrad
),
2552 [8 + 6] = MMX_OP2(pslld
),
2553 [16 + 2] = MMX_OP2(psrlq
),
2554 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2555 [16 + 6] = MMX_OP2(psllq
),
2556 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
static GenOpFunc2 *sse_op_table5[256] = {
    [0x0c] = gen_op_pi2fw,
    [0x0d] = gen_op_pi2fd,
    [0x1c] = gen_op_pf2iw,
    [0x1d] = gen_op_pf2id,
    [0x8a] = gen_op_pfnacc,
    [0x8e] = gen_op_pfpnacc,
    [0x90] = gen_op_pfcmpge,
    [0x94] = gen_op_pfmin,
    [0x96] = gen_op_pfrcp,
    [0x97] = gen_op_pfrsqrt,
    [0x9a] = gen_op_pfsub,
    [0x9e] = gen_op_pfadd,
    [0xa0] = gen_op_pfcmpgt,
    [0xa4] = gen_op_pfmax,
    [0xa6] = gen_op_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_op_movq, /* pfrsqit1 */
    [0xaa] = gen_op_pfsubr,
    [0xae] = gen_op_pfacc,
    [0xb0] = gen_op_pfcmpeq,
    [0xb4] = gen_op_pfmul,
    [0xb6] = gen_op_movq, /* pfrcpit2 */
    [0xb7] = gen_op_pmulhrw_mmx,
    [0xbb] = gen_op_pswapd,
    [0xbf] = gen_op_pavgb_mmx /* pavgusb */
};
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        gen_op_emms();
        return;
    }
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntps */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
2865 case 0x27e: /* movq xmm, ea */
2867 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2868 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2870 rm
= (modrm
& 7) | REX_B(s
);
2871 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2872 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2874 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2876 case 0x7f: /* movq ea, mm */
2878 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2879 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2882 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2883 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2886 case 0x011: /* movups */
2887 case 0x111: /* movupd */
2888 case 0x029: /* movaps */
2889 case 0x129: /* movapd */
2890 case 0x17f: /* movdqa ea, xmm */
2891 case 0x27f: /* movdqu ea, xmm */
2893 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2894 gen_sto_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
2896 rm
= (modrm
& 7) | REX_B(s
);
2897 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2898 offsetof(CPUX86State
,xmm_regs
[reg
]));
2901 case 0x211: /* movss ea, xmm */
2903 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2904 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2905 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
2907 rm
= (modrm
& 7) | REX_B(s
);
2908 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2909 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2912 case 0x311: /* movsd ea, xmm */
2914 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2915 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2917 rm
= (modrm
& 7) | REX_B(s
);
2918 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2919 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2922 case 0x013: /* movlps */
2923 case 0x113: /* movlpd */
2925 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2926 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2931 case 0x017: /* movhps */
2932 case 0x117: /* movhpd */
2934 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2935 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2940 case 0x71: /* shift mm, im */
2943 case 0x171: /* shift xmm, im */
2946 val
= ldub_code(s
->pc
++);
2948 gen_op_movl_T0_im(val
);
2949 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2951 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2952 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2954 gen_op_movl_T0_im(val
);
2955 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2957 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2958 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2960 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2964 rm
= (modrm
& 7) | REX_B(s
);
2965 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2968 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2970 sse_op2(op2_offset
, op1_offset
);
2972 case 0x050: /* movmskps */
2973 rm
= (modrm
& 7) | REX_B(s
);
2974 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[rm
]));
2975 gen_op_mov_reg_T0(OT_LONG
, reg
);
2977 case 0x150: /* movmskpd */
2978 rm
= (modrm
& 7) | REX_B(s
);
2979 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[rm
]));
2980 gen_op_mov_reg_T0(OT_LONG
, reg
);
2982 case 0x02a: /* cvtpi2ps */
2983 case 0x12a: /* cvtpi2pd */
2986 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2987 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2988 gen_ldq_env_A0(s
->mem_index
, op2_offset
);
2991 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2993 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2996 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
3000 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
3004 case 0x22a: /* cvtsi2ss */
3005 case 0x32a: /* cvtsi2sd */
3006 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3007 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3008 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3009 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
3011 case 0x02c: /* cvttps2pi */
3012 case 0x12c: /* cvttpd2pi */
3013 case 0x02d: /* cvtps2pi */
3014 case 0x12d: /* cvtpd2pi */
3017 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3018 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3019 gen_ldo_env_A0(s
->mem_index
, op2_offset
);
3021 rm
= (modrm
& 7) | REX_B(s
);
3022 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3024 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
3027 gen_op_cvttps2pi(op1_offset
, op2_offset
);
3030 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
3033 gen_op_cvtps2pi(op1_offset
, op2_offset
);
3036 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
3040 case 0x22c: /* cvttss2si */
3041 case 0x32c: /* cvttsd2si */
3042 case 0x22d: /* cvtss2si */
3043 case 0x32d: /* cvtsd2si */
3044 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3046 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3048 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_t0
.XMM_Q(0)));
3050 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3051 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3053 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3055 rm
= (modrm
& 7) | REX_B(s
);
3056 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3058 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
3059 (b
& 1) * 4](op2_offset
);
3060 gen_op_mov_reg_T0(ot
, reg
);
3062 case 0xc4: /* pinsrw */
3065 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3066 val
= ldub_code(s
->pc
++);
3069 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
3072 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
3075 case 0xc5: /* pextrw */
3079 val
= ldub_code(s
->pc
++);
3082 rm
= (modrm
& 7) | REX_B(s
);
3083 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
3087 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
3089 reg
= ((modrm
>> 3) & 7) | rex_r
;
3090 gen_op_mov_reg_T0(OT_LONG
, reg
);
3092 case 0x1d6: /* movq ea, xmm */
3094 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3095 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3097 rm
= (modrm
& 7) | REX_B(s
);
3098 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
3099 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3100 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
3103 case 0x2d6: /* movq2dq */
3106 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3107 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3108 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3110 case 0x3d6: /* movdq2q */
3112 rm
= (modrm
& 7) | REX_B(s
);
3113 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
),
3114 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3116 case 0xd7: /* pmovmskb */
3121 rm
= (modrm
& 7) | REX_B(s
);
3122 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
3125 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3127 reg
= ((modrm
>> 3) & 7) | rex_r
;
3128 gen_op_mov_reg_T0(OT_LONG
, reg
);
3134 /* generic MMX or SSE operation */
3137 /* maskmov : we must prepare A0 */
3140 #ifdef TARGET_X86_64
3141 if (s
->aflag
== 2) {
3142 gen_op_movq_A0_reg(R_EDI
);
3146 gen_op_movl_A0_reg(R_EDI
);
3148 gen_op_andl_A0_ffff();
3150 gen_add_A0_ds_seg(s
);
3152 case 0x70: /* pshufx insn */
3153 case 0xc6: /* pshufx insn */
3154 case 0xc2: /* compare insns */
3161 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3163 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3164 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3165 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f && b
!= 0x5b) ||
3167 /* specific case for SSE single instructions */
3170 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3171 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3174 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
3177 gen_ldo_env_A0(s
->mem_index
, op2_offset
);
3180 rm
= (modrm
& 7) | REX_B(s
);
3181 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3184 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3186 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3187 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3188 gen_ldq_env_A0(s
->mem_index
, op2_offset
);
3191 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3195 case 0x0f: /* 3DNow! data insns */
3196 if (!(s
->cpuid_ext2_features
& CPUID_EXT2_3DNOW
))
3198 val
= ldub_code(s
->pc
++);
3199 sse_op2
= sse_op_table5
[val
];
3202 sse_op2(op1_offset
, op2_offset
);
3204 case 0x70: /* pshufx insn */
3205 case 0xc6: /* pshufx insn */
3206 val
= ldub_code(s
->pc
++);
3207 sse_op3
= (GenOpFunc3
*)sse_op2
;
3208 sse_op3(op1_offset
, op2_offset
, val
);
3212 val
= ldub_code(s
->pc
++);
3215 sse_op2
= sse_op_table4
[val
][b1
];
3216 sse_op2(op1_offset
, op2_offset
);
3219 sse_op2(op1_offset
, op2_offset
);
3222 if (b
== 0x2e || b
== 0x2f) {
3223 s
->cc_op
= CC_OP_EFLAGS
;
3229 /* convert one instruction. s->is_jmp is set if the translation must
3230 be stopped. Return the next pc value */
3231 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
3233 int b
, prefixes
, aflag
, dflag
;
3235 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
3236 target_ulong next_eip
, tval
;
3246 #ifdef TARGET_X86_64
3251 s
->rip_offset
= 0; /* for relative ip address */
3253 b
= ldub_code(s
->pc
);
3255 /* check prefixes */
3256 #ifdef TARGET_X86_64
3260 prefixes
|= PREFIX_REPZ
;
3263 prefixes
|= PREFIX_REPNZ
;
3266 prefixes
|= PREFIX_LOCK
;
3287 prefixes
|= PREFIX_DATA
;
3290 prefixes
|= PREFIX_ADR
;
3294 rex_w
= (b
>> 3) & 1;
3295 rex_r
= (b
& 0x4) << 1;
3296 s
->rex_x
= (b
& 0x2) << 2;
3297 REX_B(s
) = (b
& 0x1) << 3;
3298 x86_64_hregs
= 1; /* select uniform byte register addressing */
3302 /* 0x66 is ignored if rex.w is set */
3305 if (prefixes
& PREFIX_DATA
)
3308 if (!(prefixes
& PREFIX_ADR
))
3315 prefixes
|= PREFIX_REPZ
;
3318 prefixes
|= PREFIX_REPNZ
;
3321 prefixes
|= PREFIX_LOCK
;
3342 prefixes
|= PREFIX_DATA
;
3345 prefixes
|= PREFIX_ADR
;
3348 if (prefixes
& PREFIX_DATA
)
3350 if (prefixes
& PREFIX_ADR
)
3354 s
->prefix
= prefixes
;
3358 /* lock generation */
3359 if (prefixes
& PREFIX_LOCK
)
3362 /* now check op code */
3366 /**************************/
3367 /* extended op code */
3368 b
= ldub_code(s
->pc
++) | 0x100;
3371 /**************************/
3389 ot
= dflag
+ OT_WORD
;
3392 case 0: /* OP Ev, Gv */
3393 modrm
= ldub_code(s
->pc
++);
3394 reg
= ((modrm
>> 3) & 7) | rex_r
;
3395 mod
= (modrm
>> 6) & 3;
3396 rm
= (modrm
& 7) | REX_B(s
);
3398 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3400 } else if (op
== OP_XORL
&& rm
== reg
) {
3402 /* xor reg, reg optimisation */
3404 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3405 gen_op_mov_reg_T0(ot
, reg
);
3406 gen_op_update1_cc();
3411 gen_op_mov_TN_reg(ot
, 1, reg
);
3412 gen_op(s
, op
, ot
, opreg
);
3414 case 1: /* OP Gv, Ev */
3415 modrm
= ldub_code(s
->pc
++);
3416 mod
= (modrm
>> 6) & 3;
3417 reg
= ((modrm
>> 3) & 7) | rex_r
;
3418 rm
= (modrm
& 7) | REX_B(s
);
3420 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3421 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3422 } else if (op
== OP_XORL
&& rm
== reg
) {
3425 gen_op_mov_TN_reg(ot
, 1, rm
);
3427 gen_op(s
, op
, ot
, reg
);
3429 case 2: /* OP A, Iv */
3430 val
= insn_get(s
, ot
);
3431 gen_op_movl_T1_im(val
);
3432 gen_op(s
, op
, ot
, OR_EAX
);
3438 case 0x80: /* GRP1 */
3448 ot
= dflag
+ OT_WORD
;
3450 modrm
= ldub_code(s
->pc
++);
3451 mod
= (modrm
>> 6) & 3;
3452 rm
= (modrm
& 7) | REX_B(s
);
3453 op
= (modrm
>> 3) & 7;
3459 s
->rip_offset
= insn_const_size(ot
);
3460 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3471 val
= insn_get(s
, ot
);
3474 val
= (int8_t)insn_get(s
, OT_BYTE
);
3477 gen_op_movl_T1_im(val
);
3478 gen_op(s
, op
, ot
, opreg
);
3482 /**************************/
3483 /* inc, dec, and other misc arith */
3484 case 0x40 ... 0x47: /* inc Gv */
3485 ot
= dflag
? OT_LONG
: OT_WORD
;
3486 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3488 case 0x48 ... 0x4f: /* dec Gv */
3489 ot
= dflag
? OT_LONG
: OT_WORD
;
3490 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3492 case 0xf6: /* GRP3 */
3497 ot
= dflag
+ OT_WORD
;
3499 modrm
= ldub_code(s
->pc
++);
3500 mod
= (modrm
>> 6) & 3;
3501 rm
= (modrm
& 7) | REX_B(s
);
3502 op
= (modrm
>> 3) & 7;
3505 s
->rip_offset
= insn_const_size(ot
);
3506 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3507 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3509 gen_op_mov_TN_reg(ot
, 0, rm
);
3514 val
= insn_get(s
, ot
);
3515 gen_op_movl_T1_im(val
);
3516 gen_op_testl_T0_T1_cc();
3517 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3522 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3524 gen_op_mov_reg_T0(ot
, rm
);
3530 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3532 gen_op_mov_reg_T0(ot
, rm
);
3534 gen_op_update_neg_cc();
3535 s
->cc_op
= CC_OP_SUBB
+ ot
;
3540 gen_op_mulb_AL_T0();
3541 s
->cc_op
= CC_OP_MULB
;
3544 gen_op_mulw_AX_T0();
3545 s
->cc_op
= CC_OP_MULW
;
3549 gen_op_mull_EAX_T0();
3550 s
->cc_op
= CC_OP_MULL
;
3552 #ifdef TARGET_X86_64
3554 gen_op_mulq_EAX_T0();
3555 s
->cc_op
= CC_OP_MULQ
;
3563 gen_op_imulb_AL_T0();
3564 s
->cc_op
= CC_OP_MULB
;
3567 gen_op_imulw_AX_T0();
3568 s
->cc_op
= CC_OP_MULW
;
3572 gen_op_imull_EAX_T0();
3573 s
->cc_op
= CC_OP_MULL
;
3575 #ifdef TARGET_X86_64
3577 gen_op_imulq_EAX_T0();
3578 s
->cc_op
= CC_OP_MULQ
;
3586 gen_jmp_im(pc_start
- s
->cs_base
);
3587 gen_op_divb_AL_T0();
3590 gen_jmp_im(pc_start
- s
->cs_base
);
3591 gen_op_divw_AX_T0();
3595 gen_jmp_im(pc_start
- s
->cs_base
);
3597 /* XXX: this is just a test */
3598 tcg_gen_macro_2(cpu_T
[0], cpu_T
[0], MACRO_TEST
);
3600 tcg_gen_helper_0_1(helper_divl_EAX_T0
, cpu_T
[0]);
3603 #ifdef TARGET_X86_64
3605 gen_jmp_im(pc_start
- s
->cs_base
);
3606 gen_op_divq_EAX_T0();
3614 gen_jmp_im(pc_start
- s
->cs_base
);
3615 gen_op_idivb_AL_T0();
3618 gen_jmp_im(pc_start
- s
->cs_base
);
3619 gen_op_idivw_AX_T0();
3623 gen_jmp_im(pc_start
- s
->cs_base
);
3624 tcg_gen_helper_0_1(helper_idivl_EAX_T0
, cpu_T
[0]);
3626 #ifdef TARGET_X86_64
3628 gen_jmp_im(pc_start
- s
->cs_base
);
3629 gen_op_idivq_EAX_T0();
3639 case 0xfe: /* GRP4 */
3640 case 0xff: /* GRP5 */
3644 ot
= dflag
+ OT_WORD
;
3646 modrm
= ldub_code(s
->pc
++);
3647 mod
= (modrm
>> 6) & 3;
3648 rm
= (modrm
& 7) | REX_B(s
);
3649 op
= (modrm
>> 3) & 7;
3650 if (op
>= 2 && b
== 0xfe) {
3654 if (op
== 2 || op
== 4) {
3655 /* operand size for jumps is 64 bit */
3657 } else if (op
== 3 || op
== 5) {
3658 /* for call calls, the operand is 16 or 32 bit, even
3660 ot
= dflag
? OT_LONG
: OT_WORD
;
3661 } else if (op
== 6) {
3662 /* default push size is 64 bit */
3663 ot
= dflag
? OT_QUAD
: OT_WORD
;
3667 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3668 if (op
>= 2 && op
!= 3 && op
!= 5)
3669 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3671 gen_op_mov_TN_reg(ot
, 0, rm
);
3675 case 0: /* inc Ev */
3680 gen_inc(s
, ot
, opreg
, 1);
3682 case 1: /* dec Ev */
3687 gen_inc(s
, ot
, opreg
, -1);
3689 case 2: /* call Ev */
3690 /* XXX: optimize if memory (no 'and' is necessary) */
3692 gen_op_andl_T0_ffff();
3693 next_eip
= s
->pc
- s
->cs_base
;
3694 gen_movtl_T1_im(next_eip
);
3699 case 3: /* lcall Ev */
3700 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3701 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3702 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3704 if (s
->pe
&& !s
->vm86
) {
3705 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3706 gen_op_set_cc_op(s
->cc_op
);
3707 gen_jmp_im(pc_start
- s
->cs_base
);
3708 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- pc_start
);
3710 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3714 case 4: /* jmp Ev */
3716 gen_op_andl_T0_ffff();
3720 case 5: /* ljmp Ev */
3721 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3722 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3723 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3725 if (s
->pe
&& !s
->vm86
) {
3726 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3727 gen_op_set_cc_op(s
->cc_op
);
3728 gen_jmp_im(pc_start
- s
->cs_base
);
3729 gen_op_ljmp_protected_T0_T1(s
->pc
- pc_start
);
3731 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3732 gen_op_movl_T0_T1();
3737 case 6: /* push Ev */
3745 case 0x84: /* test Ev, Gv */
3750 ot
= dflag
+ OT_WORD
;
3752 modrm
= ldub_code(s
->pc
++);
3753 mod
= (modrm
>> 6) & 3;
3754 rm
= (modrm
& 7) | REX_B(s
);
3755 reg
= ((modrm
>> 3) & 7) | rex_r
;
3757 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3758 gen_op_mov_TN_reg(ot
, 1, reg
);
3759 gen_op_testl_T0_T1_cc();
3760 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3763 case 0xa8: /* test eAX, Iv */
3768 ot
= dflag
+ OT_WORD
;
3769 val
= insn_get(s
, ot
);
3771 gen_op_mov_TN_reg(ot
, 0, OR_EAX
);
3772 gen_op_movl_T1_im(val
);
3773 gen_op_testl_T0_T1_cc();
3774 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3777 case 0x98: /* CWDE/CBW */
3778 #ifdef TARGET_X86_64
3780 gen_op_movslq_RAX_EAX();
3784 gen_op_movswl_EAX_AX();
3786 gen_op_movsbw_AX_AL();
3788 case 0x99: /* CDQ/CWD */
3789 #ifdef TARGET_X86_64
3791 gen_op_movsqo_RDX_RAX();
3795 gen_op_movslq_EDX_EAX();
3797 gen_op_movswl_DX_AX();
3799 case 0x1af: /* imul Gv, Ev */
3800 case 0x69: /* imul Gv, Ev, I */
3802 ot
= dflag
+ OT_WORD
;
3803 modrm
= ldub_code(s
->pc
++);
3804 reg
= ((modrm
>> 3) & 7) | rex_r
;
3806 s
->rip_offset
= insn_const_size(ot
);
3809 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3811 val
= insn_get(s
, ot
);
3812 gen_op_movl_T1_im(val
);
3813 } else if (b
== 0x6b) {
3814 val
= (int8_t)insn_get(s
, OT_BYTE
);
3815 gen_op_movl_T1_im(val
);
3817 gen_op_mov_TN_reg(ot
, 1, reg
);
3820 #ifdef TARGET_X86_64
3821 if (ot
== OT_QUAD
) {
3822 gen_op_imulq_T0_T1();
3825 if (ot
== OT_LONG
) {
3826 gen_op_imull_T0_T1();
3828 gen_op_imulw_T0_T1();
3830 gen_op_mov_reg_T0(ot
, reg
);
3831 s
->cc_op
= CC_OP_MULB
+ ot
;
3834 case 0x1c1: /* xadd Ev, Gv */
3838 ot
= dflag
+ OT_WORD
;
3839 modrm
= ldub_code(s
->pc
++);
3840 reg
= ((modrm
>> 3) & 7) | rex_r
;
3841 mod
= (modrm
>> 6) & 3;
3843 rm
= (modrm
& 7) | REX_B(s
);
3844 gen_op_mov_TN_reg(ot
, 0, reg
);
3845 gen_op_mov_TN_reg(ot
, 1, rm
);
3846 gen_op_addl_T0_T1();
3847 gen_op_mov_reg_T1(ot
, reg
);
3848 gen_op_mov_reg_T0(ot
, rm
);
3850 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3851 gen_op_mov_TN_reg(ot
, 0, reg
);
3852 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3853 gen_op_addl_T0_T1();
3854 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3855 gen_op_mov_reg_T1(ot
, reg
);
3857 gen_op_update2_cc();
3858 s
->cc_op
= CC_OP_ADDB
+ ot
;
3861 case 0x1b1: /* cmpxchg Ev, Gv */
3865 ot
= dflag
+ OT_WORD
;
3866 modrm
= ldub_code(s
->pc
++);
3867 reg
= ((modrm
>> 3) & 7) | rex_r
;
3868 mod
= (modrm
>> 6) & 3;
3869 gen_op_mov_TN_reg(ot
, 1, reg
);
3871 rm
= (modrm
& 7) | REX_B(s
);
3872 gen_op_mov_TN_reg(ot
, 0, rm
);
3873 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3874 gen_op_mov_reg_T0(ot
, rm
);
3876 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3877 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3878 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3880 s
->cc_op
= CC_OP_SUBB
+ ot
;
3882 case 0x1c7: /* cmpxchg8b */
3883 modrm
= ldub_code(s
->pc
++);
3884 mod
= (modrm
>> 6) & 3;
3885 if ((mod
== 3) || ((modrm
& 0x38) != 0x8))
3887 gen_jmp_im(pc_start
- s
->cs_base
);
3888 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3889 gen_op_set_cc_op(s
->cc_op
);
3890 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3892 s
->cc_op
= CC_OP_EFLAGS
;
3895 /**************************/
3897 case 0x50 ... 0x57: /* push */
3898 gen_op_mov_TN_reg(OT_LONG
, 0, (b
& 7) | REX_B(s
));
3901 case 0x58 ... 0x5f: /* pop */
3903 ot
= dflag
? OT_QUAD
: OT_WORD
;
3905 ot
= dflag
+ OT_WORD
;
3908 /* NOTE: order is important for pop %sp */
3910 gen_op_mov_reg_T0(ot
, (b
& 7) | REX_B(s
));
3912 case 0x60: /* pusha */
3917 case 0x61: /* popa */
3922 case 0x68: /* push Iv */
3925 ot
= dflag
? OT_QUAD
: OT_WORD
;
3927 ot
= dflag
+ OT_WORD
;
3930 val
= insn_get(s
, ot
);
3932 val
= (int8_t)insn_get(s
, OT_BYTE
);
3933 gen_op_movl_T0_im(val
);
3936 case 0x8f: /* pop Ev */
3938 ot
= dflag
? OT_QUAD
: OT_WORD
;
3940 ot
= dflag
+ OT_WORD
;
3942 modrm
= ldub_code(s
->pc
++);
3943 mod
= (modrm
>> 6) & 3;
3946 /* NOTE: order is important for pop %sp */
3948 rm
= (modrm
& 7) | REX_B(s
);
3949 gen_op_mov_reg_T0(ot
, rm
);
3951 /* NOTE: order is important too for MMU exceptions */
3952 s
->popl_esp_hack
= 1 << ot
;
3953 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3954 s
->popl_esp_hack
= 0;
3958 case 0xc8: /* enter */
3961 val
= lduw_code(s
->pc
);
3963 level
= ldub_code(s
->pc
++);
3964 gen_enter(s
, val
, level
);
3967 case 0xc9: /* leave */
3968 /* XXX: exception not precise (ESP is updated before potential exception) */
3970 gen_op_mov_TN_reg(OT_QUAD
, 0, R_EBP
);
3971 gen_op_mov_reg_T0(OT_QUAD
, R_ESP
);
3972 } else if (s
->ss32
) {
3973 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
3974 gen_op_mov_reg_T0(OT_LONG
, R_ESP
);
3976 gen_op_mov_TN_reg(OT_WORD
, 0, R_EBP
);
3977 gen_op_mov_reg_T0(OT_WORD
, R_ESP
);
3981 ot
= dflag
? OT_QUAD
: OT_WORD
;
3983 ot
= dflag
+ OT_WORD
;
3985 gen_op_mov_reg_T0(ot
, R_EBP
);
3988 case 0x06: /* push es */
3989 case 0x0e: /* push cs */
3990 case 0x16: /* push ss */
3991 case 0x1e: /* push ds */
3994 gen_op_movl_T0_seg(b
>> 3);
3997 case 0x1a0: /* push fs */
3998 case 0x1a8: /* push gs */
3999 gen_op_movl_T0_seg((b
>> 3) & 7);
4002 case 0x07: /* pop es */
4003 case 0x17: /* pop ss */
4004 case 0x1f: /* pop ds */
4009 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
4012 /* if reg == SS, inhibit interrupts/trace. */
4013 /* If several instructions disable interrupts, only the
4015 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
4016 gen_op_set_inhibit_irq();
4020 gen_jmp_im(s
->pc
- s
->cs_base
);
4024 case 0x1a1: /* pop fs */
4025 case 0x1a9: /* pop gs */
4027 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
4030 gen_jmp_im(s
->pc
- s
->cs_base
);
4035 /**************************/
4038 case 0x89: /* mov Gv, Ev */
4042 ot
= dflag
+ OT_WORD
;
4043 modrm
= ldub_code(s
->pc
++);
4044 reg
= ((modrm
>> 3) & 7) | rex_r
;
4046 /* generate a generic store */
4047 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
4050 case 0xc7: /* mov Ev, Iv */
4054 ot
= dflag
+ OT_WORD
;
4055 modrm
= ldub_code(s
->pc
++);
4056 mod
= (modrm
>> 6) & 3;
4058 s
->rip_offset
= insn_const_size(ot
);
4059 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4061 val
= insn_get(s
, ot
);
4062 gen_op_movl_T0_im(val
);
4064 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4066 gen_op_mov_reg_T0(ot
, (modrm
& 7) | REX_B(s
));
4069 case 0x8b: /* mov Ev, Gv */
4073 ot
= OT_WORD
+ dflag
;
4074 modrm
= ldub_code(s
->pc
++);
4075 reg
= ((modrm
>> 3) & 7) | rex_r
;
4077 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4078 gen_op_mov_reg_T0(ot
, reg
);
4080 case 0x8e: /* mov seg, Gv */
4081 modrm
= ldub_code(s
->pc
++);
4082 reg
= (modrm
>> 3) & 7;
4083 if (reg
>= 6 || reg
== R_CS
)
4085 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
4086 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
4088 /* if reg == SS, inhibit interrupts/trace */
4089 /* If several instructions disable interrupts, only the
4091 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
4092 gen_op_set_inhibit_irq();
4096 gen_jmp_im(s
->pc
- s
->cs_base
);
4100 case 0x8c: /* mov Gv, seg */
4101 modrm
= ldub_code(s
->pc
++);
4102 reg
= (modrm
>> 3) & 7;
4103 mod
= (modrm
>> 6) & 3;
4106 gen_op_movl_T0_seg(reg
);
4108 ot
= OT_WORD
+ dflag
;
4111 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
4114 case 0x1b6: /* movzbS Gv, Eb */
4115 case 0x1b7: /* movzwS Gv, Eb */
4116 case 0x1be: /* movsbS Gv, Eb */
4117 case 0x1bf: /* movswS Gv, Eb */
4120 /* d_ot is the size of destination */
4121 d_ot
= dflag
+ OT_WORD
;
4122 /* ot is the size of source */
4123 ot
= (b
& 1) + OT_BYTE
;
4124 modrm
= ldub_code(s
->pc
++);
4125 reg
= ((modrm
>> 3) & 7) | rex_r
;
4126 mod
= (modrm
>> 6) & 3;
4127 rm
= (modrm
& 7) | REX_B(s
);
4130 gen_op_mov_TN_reg(ot
, 0, rm
);
4131 switch(ot
| (b
& 8)) {
4133 gen_op_movzbl_T0_T0();
4136 gen_op_movsbl_T0_T0();
4139 gen_op_movzwl_T0_T0();
4143 gen_op_movswl_T0_T0();
4146 gen_op_mov_reg_T0(d_ot
, reg
);
4148 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4150 gen_op_lds_T0_A0(ot
+ s
->mem_index
);
4152 gen_op_ldu_T0_A0(ot
+ s
->mem_index
);
4154 gen_op_mov_reg_T0(d_ot
, reg
);
4159 case 0x8d: /* lea */
4160 ot
= dflag
+ OT_WORD
;
4161 modrm
= ldub_code(s
->pc
++);
4162 mod
= (modrm
>> 6) & 3;
4165 reg
= ((modrm
>> 3) & 7) | rex_r
;
4166 /* we must ensure that no segment is added */
4170 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4172 gen_op_mov_reg_A0(ot
- OT_WORD
, reg
);
4175 case 0xa0: /* mov EAX, Ov */
4177 case 0xa2: /* mov Ov, EAX */
4180 target_ulong offset_addr
;
4185 ot
= dflag
+ OT_WORD
;
4186 #ifdef TARGET_X86_64
4187 if (s
->aflag
== 2) {
4188 offset_addr
= ldq_code(s
->pc
);
4190 gen_op_movq_A0_im(offset_addr
);
4195 offset_addr
= insn_get(s
, OT_LONG
);
4197 offset_addr
= insn_get(s
, OT_WORD
);
4199 gen_op_movl_A0_im(offset_addr
);
4201 gen_add_A0_ds_seg(s
);
4203 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4204 gen_op_mov_reg_T0(ot
, R_EAX
);
4206 gen_op_mov_TN_reg(ot
, 0, R_EAX
);
4207 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4211 case 0xd7: /* xlat */
4212 #ifdef TARGET_X86_64
4213 if (s
->aflag
== 2) {
4214 gen_op_movq_A0_reg(R_EBX
);
4215 gen_op_addq_A0_AL();
4219 gen_op_movl_A0_reg(R_EBX
);
4220 gen_op_addl_A0_AL();
4222 gen_op_andl_A0_ffff();
4224 gen_add_A0_ds_seg(s
);
4225 gen_op_ldu_T0_A0(OT_BYTE
+ s
->mem_index
);
4226 gen_op_mov_reg_T0(OT_BYTE
, R_EAX
);
4228 case 0xb0 ... 0xb7: /* mov R, Ib */
4229 val
= insn_get(s
, OT_BYTE
);
4230 gen_op_movl_T0_im(val
);
4231 gen_op_mov_reg_T0(OT_BYTE
, (b
& 7) | REX_B(s
));
4233 case 0xb8 ... 0xbf: /* mov R, Iv */
4234 #ifdef TARGET_X86_64
4238 tmp
= ldq_code(s
->pc
);
4240 reg
= (b
& 7) | REX_B(s
);
4241 gen_movtl_T0_im(tmp
);
4242 gen_op_mov_reg_T0(OT_QUAD
, reg
);
4246 ot
= dflag
? OT_LONG
: OT_WORD
;
4247 val
= insn_get(s
, ot
);
4248 reg
= (b
& 7) | REX_B(s
);
4249 gen_op_movl_T0_im(val
);
4250 gen_op_mov_reg_T0(ot
, reg
);
4254 case 0x91 ... 0x97: /* xchg R, EAX */
4255 ot
= dflag
+ OT_WORD
;
4256 reg
= (b
& 7) | REX_B(s
);
4260 case 0x87: /* xchg Ev, Gv */
4264 ot
= dflag
+ OT_WORD
;
4265 modrm
= ldub_code(s
->pc
++);
4266 reg
= ((modrm
>> 3) & 7) | rex_r
;
4267 mod
= (modrm
>> 6) & 3;
4269 rm
= (modrm
& 7) | REX_B(s
);
4271 gen_op_mov_TN_reg(ot
, 0, reg
);
4272 gen_op_mov_TN_reg(ot
, 1, rm
);
4273 gen_op_mov_reg_T0(ot
, rm
);
4274 gen_op_mov_reg_T1(ot
, reg
);
4276 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4277 gen_op_mov_TN_reg(ot
, 0, reg
);
4278 /* for xchg, lock is implicit */
4279 if (!(prefixes
& PREFIX_LOCK
))
4281 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4282 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4283 if (!(prefixes
& PREFIX_LOCK
))
4285 gen_op_mov_reg_T1(ot
, reg
);
4288 case 0xc4: /* les Gv */
4293 case 0xc5: /* lds Gv */
4298 case 0x1b2: /* lss Gv */
4301 case 0x1b4: /* lfs Gv */
4304 case 0x1b5: /* lgs Gv */
4307 ot
= dflag
? OT_LONG
: OT_WORD
;
4308 modrm
= ldub_code(s
->pc
++);
4309 reg
= ((modrm
>> 3) & 7) | rex_r
;
4310 mod
= (modrm
>> 6) & 3;
4313 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4314 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4315 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4316 /* load the segment first to handle exceptions properly */
4317 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
4318 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4319 /* then put the data */
4320 gen_op_mov_reg_T1(ot
, reg
);
4322 gen_jmp_im(s
->pc
- s
->cs_base
);
4327 /************************/
4338 ot
= dflag
+ OT_WORD
;
4340 modrm
= ldub_code(s
->pc
++);
4341 mod
= (modrm
>> 6) & 3;
4342 op
= (modrm
>> 3) & 7;
4348 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4351 opreg
= (modrm
& 7) | REX_B(s
);
4356 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4359 shift
= ldub_code(s
->pc
++);
4361 gen_shifti(s
, op
, ot
, opreg
, shift
);
4376 case 0x1a4: /* shld imm */
4380 case 0x1a5: /* shld cl */
4384 case 0x1ac: /* shrd imm */
4388 case 0x1ad: /* shrd cl */
4392 ot
= dflag
+ OT_WORD
;
4393 modrm
= ldub_code(s
->pc
++);
4394 mod
= (modrm
>> 6) & 3;
4395 rm
= (modrm
& 7) | REX_B(s
);
4396 reg
= ((modrm
>> 3) & 7) | rex_r
;
4399 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4400 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4402 gen_op_mov_TN_reg(ot
, 0, rm
);
4404 gen_op_mov_TN_reg(ot
, 1, reg
);
4407 val
= ldub_code(s
->pc
++);
4414 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4416 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4417 if (op
== 0 && ot
!= OT_WORD
)
4418 s
->cc_op
= CC_OP_SHLB
+ ot
;
4420 s
->cc_op
= CC_OP_SARB
+ ot
;
4423 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4424 gen_op_set_cc_op(s
->cc_op
);
4426 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4428 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4429 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4432 gen_op_mov_reg_T0(ot
, rm
);
4436 /************************/
4439 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4440 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4441 /* XXX: what to do if illegal op ? */
4442 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4445 modrm
= ldub_code(s
->pc
++);
4446 mod
= (modrm
>> 6) & 3;
4448 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4451 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4453 case 0x00 ... 0x07: /* fxxxs */
4454 case 0x10 ... 0x17: /* fixxxl */
4455 case 0x20 ... 0x27: /* fxxxl */
4456 case 0x30 ... 0x37: /* fixxx */
4463 gen_op_flds_FT0_A0();
4466 gen_op_fildl_FT0_A0();
4469 gen_op_fldl_FT0_A0();
4473 gen_op_fild_FT0_A0();
4477 gen_op_fp_arith_ST0_FT0
[op1
]();
4479 /* fcomp needs pop */
4484 case 0x08: /* flds */
4485 case 0x0a: /* fsts */
4486 case 0x0b: /* fstps */
4487 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4488 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4489 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4494 gen_op_flds_ST0_A0();
4497 gen_op_fildl_ST0_A0();
4500 gen_op_fldl_ST0_A0();
4504 gen_op_fild_ST0_A0();
4511 gen_op_fisttl_ST0_A0();
4514 gen_op_fisttll_ST0_A0();
4518 gen_op_fistt_ST0_A0();
4525 gen_op_fsts_ST0_A0();
4528 gen_op_fistl_ST0_A0();
4531 gen_op_fstl_ST0_A0();
4535 gen_op_fist_ST0_A0();
4543 case 0x0c: /* fldenv mem */
4544 gen_op_fldenv_A0(s
->dflag
);
4546 case 0x0d: /* fldcw mem */
4549 case 0x0e: /* fnstenv mem */
4550 gen_op_fnstenv_A0(s
->dflag
);
4552 case 0x0f: /* fnstcw mem */
4555 case 0x1d: /* fldt mem */
4556 gen_op_fldt_ST0_A0();
4558 case 0x1f: /* fstpt mem */
4559 gen_op_fstt_ST0_A0();
4562 case 0x2c: /* frstor mem */
4563 gen_op_frstor_A0(s
->dflag
);
4565 case 0x2e: /* fnsave mem */
4566 gen_op_fnsave_A0(s
->dflag
);
4568 case 0x2f: /* fnstsw mem */
4571 case 0x3c: /* fbld */
4572 gen_op_fbld_ST0_A0();
4574 case 0x3e: /* fbstp */
4575 gen_op_fbst_ST0_A0();
4578 case 0x3d: /* fildll */
4579 gen_op_fildll_ST0_A0();
4581 case 0x3f: /* fistpll */
4582 gen_op_fistll_ST0_A0();
4589 /* register float ops */
4593 case 0x08: /* fld sti */
4595 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4597 case 0x09: /* fxchg sti */
4598 case 0x29: /* fxchg4 sti, undocumented op */
4599 case 0x39: /* fxchg7 sti, undocumented op */
4600 gen_op_fxchg_ST0_STN(opreg
);
4602 case 0x0a: /* grp d9/2 */
4605 /* check exceptions (FreeBSD FPU probe) */
4606 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4607 gen_op_set_cc_op(s
->cc_op
);
4608 gen_jmp_im(pc_start
- s
->cs_base
);
4615 case 0x0c: /* grp d9/4 */
4625 gen_op_fcom_ST0_FT0();
4634 case 0x0d: /* grp d9/5 */
4643 gen_op_fldl2t_ST0();
4647 gen_op_fldl2e_ST0();
4655 gen_op_fldlg2_ST0();
4659 gen_op_fldln2_ST0();
4670 case 0x0e: /* grp d9/6 */
4681 case 3: /* fpatan */
4684 case 4: /* fxtract */
4687 case 5: /* fprem1 */
4690 case 6: /* fdecstp */
4694 case 7: /* fincstp */
4699 case 0x0f: /* grp d9/7 */
4704 case 1: /* fyl2xp1 */
4710 case 3: /* fsincos */
4713 case 5: /* fscale */
4716 case 4: /* frndint */
4728 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4729 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4730 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4736 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4740 gen_op_fmov_FT0_STN(opreg
);
4741 gen_op_fp_arith_ST0_FT0
[op1
]();
4745 case 0x02: /* fcom */
4746 case 0x22: /* fcom2, undocumented op */
4747 gen_op_fmov_FT0_STN(opreg
);
4748 gen_op_fcom_ST0_FT0();
4750 case 0x03: /* fcomp */
4751 case 0x23: /* fcomp3, undocumented op */
4752 case 0x32: /* fcomp5, undocumented op */
4753 gen_op_fmov_FT0_STN(opreg
);
4754 gen_op_fcom_ST0_FT0();
4757 case 0x15: /* da/5 */
4759 case 1: /* fucompp */
4760 gen_op_fmov_FT0_STN(1);
4761 gen_op_fucom_ST0_FT0();
4771 case 0: /* feni (287 only, just do nop here) */
4773 case 1: /* fdisi (287 only, just do nop here) */
4778 case 3: /* fninit */
4781 case 4: /* fsetpm (287 only, just do nop here) */
4787 case 0x1d: /* fucomi */
4788 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4789 gen_op_set_cc_op(s
->cc_op
);
4790 gen_op_fmov_FT0_STN(opreg
);
4791 gen_op_fucomi_ST0_FT0();
4792 s
->cc_op
= CC_OP_EFLAGS
;
4794 case 0x1e: /* fcomi */
4795 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4796 gen_op_set_cc_op(s
->cc_op
);
4797 gen_op_fmov_FT0_STN(opreg
);
4798 gen_op_fcomi_ST0_FT0();
4799 s
->cc_op
= CC_OP_EFLAGS
;
4801 case 0x28: /* ffree sti */
4802 gen_op_ffree_STN(opreg
);
4804 case 0x2a: /* fst sti */
4805 gen_op_fmov_STN_ST0(opreg
);
4807 case 0x2b: /* fstp sti */
4808 case 0x0b: /* fstp1 sti, undocumented op */
4809 case 0x3a: /* fstp8 sti, undocumented op */
4810 case 0x3b: /* fstp9 sti, undocumented op */
4811 gen_op_fmov_STN_ST0(opreg
);
4814 case 0x2c: /* fucom st(i) */
4815 gen_op_fmov_FT0_STN(opreg
);
4816 gen_op_fucom_ST0_FT0();
4818 case 0x2d: /* fucomp st(i) */
4819 gen_op_fmov_FT0_STN(opreg
);
4820 gen_op_fucom_ST0_FT0();
4823 case 0x33: /* de/3 */
4825 case 1: /* fcompp */
4826 gen_op_fmov_FT0_STN(1);
4827 gen_op_fcom_ST0_FT0();
4835 case 0x38: /* ffreep sti, undocumented op */
4836 gen_op_ffree_STN(opreg
);
4839 case 0x3c: /* df/4 */
4842 gen_op_fnstsw_EAX();
4848 case 0x3d: /* fucomip */
4849 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4850 gen_op_set_cc_op(s
->cc_op
);
4851 gen_op_fmov_FT0_STN(opreg
);
4852 gen_op_fucomi_ST0_FT0();
4854 s
->cc_op
= CC_OP_EFLAGS
;
4856 case 0x3e: /* fcomip */
4857 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4858 gen_op_set_cc_op(s
->cc_op
);
4859 gen_op_fmov_FT0_STN(opreg
);
4860 gen_op_fcomi_ST0_FT0();
4862 s
->cc_op
= CC_OP_EFLAGS
;
4864 case 0x10 ... 0x13: /* fcmovxx */
4868 const static uint8_t fcmov_cc
[8] = {
4874 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4876 gen_op_fcmov_ST0_STN_T0(opreg
);
4884 /************************/
4887 case 0xa4: /* movsS */
4892 ot
= dflag
+ OT_WORD
;
4894 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4895 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4901 case 0xaa: /* stosS */
4906 ot
= dflag
+ OT_WORD
;
4908 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4909 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4914 case 0xac: /* lodsS */
4919 ot
= dflag
+ OT_WORD
;
4920 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4921 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4926 case 0xae: /* scasS */
4931 ot
= dflag
+ OT_WORD
;
4932 if (prefixes
& PREFIX_REPNZ
) {
4933 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4934 } else if (prefixes
& PREFIX_REPZ
) {
4935 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4938 s
->cc_op
= CC_OP_SUBB
+ ot
;
4942 case 0xa6: /* cmpsS */
4947 ot
= dflag
+ OT_WORD
;
4948 if (prefixes
& PREFIX_REPNZ
) {
4949 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4950 } else if (prefixes
& PREFIX_REPZ
) {
4951 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4954 s
->cc_op
= CC_OP_SUBB
+ ot
;
4957 case 0x6c: /* insS */
4962 ot
= dflag
? OT_LONG
: OT_WORD
;
4963 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4964 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4965 gen_op_andl_T0_ffff();
4966 if (gen_svm_check_io(s
, pc_start
,
4967 SVM_IOIO_TYPE_MASK
| (1 << (4+ot
)) |
4968 svm_is_rep(prefixes
) | 4 | (1 << (7+s
->aflag
))))
4970 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4971 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4976 case 0x6e: /* outsS */
4981 ot
= dflag
? OT_LONG
: OT_WORD
;
4982 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4983 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4984 gen_op_andl_T0_ffff();
4985 if (gen_svm_check_io(s
, pc_start
,
4986 (1 << (4+ot
)) | svm_is_rep(prefixes
) |
4987 4 | (1 << (7+s
->aflag
))))
4989 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4990 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4996 /************************/
5004 ot
= dflag
? OT_LONG
: OT_WORD
;
5005 val
= ldub_code(s
->pc
++);
5006 gen_op_movl_T0_im(val
);
5007 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5008 if (gen_svm_check_io(s
, pc_start
,
5009 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) |
5013 gen_op_mov_reg_T1(ot
, R_EAX
);
5020 ot
= dflag
? OT_LONG
: OT_WORD
;
5021 val
= ldub_code(s
->pc
++);
5022 gen_op_movl_T0_im(val
);
5023 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5024 if (gen_svm_check_io(s
, pc_start
, svm_is_rep(prefixes
) |
5027 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5035 ot
= dflag
? OT_LONG
: OT_WORD
;
5036 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5037 gen_op_andl_T0_ffff();
5038 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5039 if (gen_svm_check_io(s
, pc_start
,
5040 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) |
5044 gen_op_mov_reg_T1(ot
, R_EAX
);
5051 ot
= dflag
? OT_LONG
: OT_WORD
;
5052 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5053 gen_op_andl_T0_ffff();
5054 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5055 if (gen_svm_check_io(s
, pc_start
,
5056 svm_is_rep(prefixes
) | (1 << (4+ot
))))
5058 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5062 /************************/
5064 case 0xc2: /* ret im */
5065 val
= ldsw_code(s
->pc
);
5068 if (CODE64(s
) && s
->dflag
)
5070 gen_stack_update(s
, val
+ (2 << s
->dflag
));
5072 gen_op_andl_T0_ffff();
5076 case 0xc3: /* ret */
5080 gen_op_andl_T0_ffff();
5084 case 0xca: /* lret im */
5085 val
= ldsw_code(s
->pc
);
5088 if (s
->pe
&& !s
->vm86
) {
5089 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5090 gen_op_set_cc_op(s
->cc_op
);
5091 gen_jmp_im(pc_start
- s
->cs_base
);
5092 gen_op_lret_protected(s
->dflag
, val
);
5096 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
5098 gen_op_andl_T0_ffff();
5099 /* NOTE: keeping EIP updated is not a problem in case of
5103 gen_op_addl_A0_im(2 << s
->dflag
);
5104 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
5105 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
5106 /* add stack offset */
5107 gen_stack_update(s
, val
+ (4 << s
->dflag
));
5111 case 0xcb: /* lret */
5114 case 0xcf: /* iret */
5115 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IRET
))
5119 gen_op_iret_real(s
->dflag
);
5120 s
->cc_op
= CC_OP_EFLAGS
;
5121 } else if (s
->vm86
) {
5123 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5125 gen_op_iret_real(s
->dflag
);
5126 s
->cc_op
= CC_OP_EFLAGS
;
5129 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5130 gen_op_set_cc_op(s
->cc_op
);
5131 gen_jmp_im(pc_start
- s
->cs_base
);
5132 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
5133 s
->cc_op
= CC_OP_EFLAGS
;
5137 case 0xe8: /* call im */
5140 tval
= (int32_t)insn_get(s
, OT_LONG
);
5142 tval
= (int16_t)insn_get(s
, OT_WORD
);
5143 next_eip
= s
->pc
- s
->cs_base
;
5147 gen_movtl_T0_im(next_eip
);
5152 case 0x9a: /* lcall im */
5154 unsigned int selector
, offset
;
5158 ot
= dflag
? OT_LONG
: OT_WORD
;
5159 offset
= insn_get(s
, ot
);
5160 selector
= insn_get(s
, OT_WORD
);
5162 gen_op_movl_T0_im(selector
);
5163 gen_op_movl_T1_imu(offset
);
5166 case 0xe9: /* jmp im */
5168 tval
= (int32_t)insn_get(s
, OT_LONG
);
5170 tval
= (int16_t)insn_get(s
, OT_WORD
);
5171 tval
+= s
->pc
- s
->cs_base
;
5176 case 0xea: /* ljmp im */
5178 unsigned int selector
, offset
;
5182 ot
= dflag
? OT_LONG
: OT_WORD
;
5183 offset
= insn_get(s
, ot
);
5184 selector
= insn_get(s
, OT_WORD
);
5186 gen_op_movl_T0_im(selector
);
5187 gen_op_movl_T1_imu(offset
);
5190 case 0xeb: /* jmp Jb */
5191 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5192 tval
+= s
->pc
- s
->cs_base
;
5197 case 0x70 ... 0x7f: /* jcc Jb */
5198 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5200 case 0x180 ... 0x18f: /* jcc Jv */
5202 tval
= (int32_t)insn_get(s
, OT_LONG
);
5204 tval
= (int16_t)insn_get(s
, OT_WORD
);
5207 next_eip
= s
->pc
- s
->cs_base
;
5211 gen_jcc(s
, b
, tval
, next_eip
);
5214 case 0x190 ... 0x19f: /* setcc Gv */
5215 modrm
= ldub_code(s
->pc
++);
5217 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
5219 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5220 ot
= dflag
+ OT_WORD
;
5221 modrm
= ldub_code(s
->pc
++);
5222 reg
= ((modrm
>> 3) & 7) | rex_r
;
5223 mod
= (modrm
>> 6) & 3;
5226 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5227 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
5229 rm
= (modrm
& 7) | REX_B(s
);
5230 gen_op_mov_TN_reg(ot
, 1, rm
);
5232 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
5235 /************************/
5237 case 0x9c: /* pushf */
5238 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PUSHF
))
5240 if (s
->vm86
&& s
->iopl
!= 3) {
5241 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5243 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5244 gen_op_set_cc_op(s
->cc_op
);
5245 gen_op_movl_T0_eflags();
5249 case 0x9d: /* popf */
5250 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_POPF
))
5252 if (s
->vm86
&& s
->iopl
!= 3) {
5253 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5258 gen_op_movl_eflags_T0_cpl0();
5260 gen_op_movw_eflags_T0_cpl0();
5263 if (s
->cpl
<= s
->iopl
) {
5265 gen_op_movl_eflags_T0_io();
5267 gen_op_movw_eflags_T0_io();
5271 gen_op_movl_eflags_T0();
5273 gen_op_movw_eflags_T0();
5278 s
->cc_op
= CC_OP_EFLAGS
;
5279 /* abort translation because TF flag may change */
5280 gen_jmp_im(s
->pc
- s
->cs_base
);
5284 case 0x9e: /* sahf */
5287 gen_op_mov_TN_reg(OT_BYTE
, 0, R_AH
);
5288 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5289 gen_op_set_cc_op(s
->cc_op
);
5290 gen_op_movb_eflags_T0();
5291 s
->cc_op
= CC_OP_EFLAGS
;
5293 case 0x9f: /* lahf */
5296 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5297 gen_op_set_cc_op(s
->cc_op
);
5298 gen_op_movl_T0_eflags();
5299 gen_op_mov_reg_T0(OT_BYTE
, R_AH
);
5301 case 0xf5: /* cmc */
5302 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5303 gen_op_set_cc_op(s
->cc_op
);
5305 s
->cc_op
= CC_OP_EFLAGS
;
5307 case 0xf8: /* clc */
5308 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5309 gen_op_set_cc_op(s
->cc_op
);
5311 s
->cc_op
= CC_OP_EFLAGS
;
5313 case 0xf9: /* stc */
5314 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5315 gen_op_set_cc_op(s
->cc_op
);
5317 s
->cc_op
= CC_OP_EFLAGS
;
5319 case 0xfc: /* cld */
5322 case 0xfd: /* std */
5326 /************************/
5327 /* bit operations */
5328 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5329 ot
= dflag
+ OT_WORD
;
5330 modrm
= ldub_code(s
->pc
++);
5331 op
= (modrm
>> 3) & 7;
5332 mod
= (modrm
>> 6) & 3;
5333 rm
= (modrm
& 7) | REX_B(s
);
5336 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5337 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
5339 gen_op_mov_TN_reg(ot
, 0, rm
);
5342 val
= ldub_code(s
->pc
++);
5343 gen_op_movl_T1_im(val
);
5347 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5348 s
->cc_op
= CC_OP_SARB
+ ot
;
5351 gen_op_st_T0_A0(ot
+ s
->mem_index
);
5353 gen_op_mov_reg_T0(ot
, rm
);
5354 gen_op_update_bt_cc();
5357 case 0x1a3: /* bt Gv, Ev */
5360 case 0x1ab: /* bts */
5363 case 0x1b3: /* btr */
5366 case 0x1bb: /* btc */
5369 ot
= dflag
+ OT_WORD
;
5370 modrm
= ldub_code(s
->pc
++);
5371 reg
= ((modrm
>> 3) & 7) | rex_r
;
5372 mod
= (modrm
>> 6) & 3;
5373 rm
= (modrm
& 7) | REX_B(s
);
5374 gen_op_mov_TN_reg(OT_LONG
, 1, reg
);
5376 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5377 /* specific case: we need to add a displacement */
5378 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5379 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
5381 gen_op_mov_TN_reg(ot
, 0, rm
);
5383 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5384 s
->cc_op
= CC_OP_SARB
+ ot
;
5387 gen_op_st_T0_A0(ot
+ s
->mem_index
);
5389 gen_op_mov_reg_T0(ot
, rm
);
5390 gen_op_update_bt_cc();
5393 case 0x1bc: /* bsf */
5394 case 0x1bd: /* bsr */
5395 ot
= dflag
+ OT_WORD
;
5396 modrm
= ldub_code(s
->pc
++);
5397 reg
= ((modrm
>> 3) & 7) | rex_r
;
5398 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5399 /* NOTE: in order to handle the 0 case, we must load the
5400 result. It could be optimized with a generated jump */
5401 gen_op_mov_TN_reg(ot
, 1, reg
);
5402 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5403 gen_op_mov_reg_T1(ot
, reg
);
5404 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5406 /************************/
5408 case 0x27: /* daa */
5411 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5412 gen_op_set_cc_op(s
->cc_op
);
5414 s
->cc_op
= CC_OP_EFLAGS
;
5416 case 0x2f: /* das */
5419 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5420 gen_op_set_cc_op(s
->cc_op
);
5422 s
->cc_op
= CC_OP_EFLAGS
;
5424 case 0x37: /* aaa */
5427 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5428 gen_op_set_cc_op(s
->cc_op
);
5430 s
->cc_op
= CC_OP_EFLAGS
;
5432 case 0x3f: /* aas */
5435 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5436 gen_op_set_cc_op(s
->cc_op
);
5438 s
->cc_op
= CC_OP_EFLAGS
;
5440 case 0xd4: /* aam */
5443 val
= ldub_code(s
->pc
++);
5445 gen_exception(s
, EXCP00_DIVZ
, pc_start
- s
->cs_base
);
5448 s
->cc_op
= CC_OP_LOGICB
;
5451 case 0xd5: /* aad */
5454 val
= ldub_code(s
->pc
++);
5456 s
->cc_op
= CC_OP_LOGICB
;
5458 /************************/
5460 case 0x90: /* nop */
5461 /* XXX: xchg + rex handling */
5462 /* XXX: correct lock test for all insn */
5463 if (prefixes
& PREFIX_LOCK
)
5465 if (prefixes
& PREFIX_REPZ
) {
5466 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PAUSE
);
5469 case 0x9b: /* fwait */
5470 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5471 (HF_MP_MASK
| HF_TS_MASK
)) {
5472 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5474 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5475 gen_op_set_cc_op(s
->cc_op
);
5476 gen_jmp_im(pc_start
- s
->cs_base
);
5480 case 0xcc: /* int3 */
5481 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5483 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5485 case 0xcd: /* int N */
5486 val
= ldub_code(s
->pc
++);
5487 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5489 if (s
->vm86
&& s
->iopl
!= 3) {
5490 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5492 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5495 case 0xce: /* into */
5498 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5500 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5501 gen_op_set_cc_op(s
->cc_op
);
5502 gen_jmp_im(pc_start
- s
->cs_base
);
5503 gen_op_into(s
->pc
- pc_start
);
5505 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5506 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_ICEBP
))
5509 gen_debug(s
, pc_start
- s
->cs_base
);
5512 tb_flush(cpu_single_env
);
5513 cpu_set_log(CPU_LOG_INT
| CPU_LOG_TB_IN_ASM
);
5516 case 0xfa: /* cli */
5518 if (s
->cpl
<= s
->iopl
) {
5521 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5527 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5531 case 0xfb: /* sti */
5533 if (s
->cpl
<= s
->iopl
) {
5536 /* interruptions are enabled only the first insn after sti */
5537 /* If several instructions disable interrupts, only the
5539 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5540 gen_op_set_inhibit_irq();
5541 /* give a chance to handle pending irqs */
5542 gen_jmp_im(s
->pc
- s
->cs_base
);
5545 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5551 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5555 case 0x62: /* bound */
5558 ot
= dflag
? OT_LONG
: OT_WORD
;
5559 modrm
= ldub_code(s
->pc
++);
5560 reg
= (modrm
>> 3) & 7;
5561 mod
= (modrm
>> 6) & 3;
5564 gen_op_mov_TN_reg(ot
, 0, reg
);
5565 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5566 gen_jmp_im(pc_start
- s
->cs_base
);
5572 case 0x1c8 ... 0x1cf: /* bswap reg */
5573 reg
= (b
& 7) | REX_B(s
);
5574 #ifdef TARGET_X86_64
5576 gen_op_mov_TN_reg(OT_QUAD
, 0, reg
);
5577 tcg_gen_bswap_i64(cpu_T
[0], cpu_T
[0]);
5578 gen_op_mov_reg_T0(OT_QUAD
, reg
);
5582 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
5584 tmp0
= tcg_temp_new(TCG_TYPE_I32
);
5585 tcg_gen_trunc_i64_i32(tmp0
, cpu_T
[0]);
5586 tcg_gen_bswap_i32(tmp0
, tmp0
);
5587 tcg_gen_extu_i32_i64(cpu_T
[0], tmp0
);
5588 gen_op_mov_reg_T0(OT_LONG
, reg
);
5592 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
5593 tcg_gen_bswap_i32(cpu_T
[0], cpu_T
[0]);
5594 gen_op_mov_reg_T0(OT_LONG
, reg
);
5598 case 0xd6: /* salc */
5601 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5602 gen_op_set_cc_op(s
->cc_op
);
5605 case 0xe0: /* loopnz */
5606 case 0xe1: /* loopz */
5607 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5608 gen_op_set_cc_op(s
->cc_op
);
5610 case 0xe2: /* loop */
5611 case 0xe3: /* jecxz */
5615 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5616 next_eip
= s
->pc
- s
->cs_base
;
5621 l1
= gen_new_label();
5622 l2
= gen_new_label();
5625 gen_op_jz_ecx
[s
->aflag
](l1
);
5627 gen_op_dec_ECX
[s
->aflag
]();
5630 gen_op_loop
[s
->aflag
][b
](l1
);
5633 gen_jmp_im(next_eip
);
5634 gen_op_jmp_label(l2
);
5641 case 0x130: /* wrmsr */
5642 case 0x132: /* rdmsr */
5644 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5648 retval
= gen_svm_check_intercept_param(s
, pc_start
, SVM_EXIT_MSR
, 0);
5651 retval
= gen_svm_check_intercept_param(s
, pc_start
, SVM_EXIT_MSR
, 1);
5658 case 0x131: /* rdtsc */
5659 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_RDTSC
))
5661 gen_jmp_im(pc_start
- s
->cs_base
);
5664 case 0x133: /* rdpmc */
5665 gen_jmp_im(pc_start
- s
->cs_base
);
5668 case 0x134: /* sysenter */
5672 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5674 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5675 gen_op_set_cc_op(s
->cc_op
);
5676 s
->cc_op
= CC_OP_DYNAMIC
;
5678 gen_jmp_im(pc_start
- s
->cs_base
);
5683 case 0x135: /* sysexit */
5687 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5689 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5690 gen_op_set_cc_op(s
->cc_op
);
5691 s
->cc_op
= CC_OP_DYNAMIC
;
5693 gen_jmp_im(pc_start
- s
->cs_base
);
5698 #ifdef TARGET_X86_64
5699 case 0x105: /* syscall */
5700 /* XXX: is it usable in real mode ? */
5701 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5702 gen_op_set_cc_op(s
->cc_op
);
5703 s
->cc_op
= CC_OP_DYNAMIC
;
5705 gen_jmp_im(pc_start
- s
->cs_base
);
5706 gen_op_syscall(s
->pc
- pc_start
);
5709 case 0x107: /* sysret */
5711 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5713 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5714 gen_op_set_cc_op(s
->cc_op
);
5715 s
->cc_op
= CC_OP_DYNAMIC
;
5717 gen_jmp_im(pc_start
- s
->cs_base
);
5718 gen_op_sysret(s
->dflag
);
5719 /* condition codes are modified only in long mode */
5721 s
->cc_op
= CC_OP_EFLAGS
;
5726 case 0x1a2: /* cpuid */
5727 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_CPUID
))
5731 case 0xf4: /* hlt */
5733 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5735 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_HLT
))
5737 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5738 gen_op_set_cc_op(s
->cc_op
);
5739 gen_jmp_im(s
->pc
- s
->cs_base
);
5745 modrm
= ldub_code(s
->pc
++);
5746 mod
= (modrm
>> 6) & 3;
5747 op
= (modrm
>> 3) & 7;
5750 if (!s
->pe
|| s
->vm86
)
5752 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_READ
))
5754 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5758 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5761 if (!s
->pe
|| s
->vm86
)
5764 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5766 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_WRITE
))
5768 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5769 gen_jmp_im(pc_start
- s
->cs_base
);
5774 if (!s
->pe
|| s
->vm86
)
5776 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_READ
))
5778 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5782 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5785 if (!s
->pe
|| s
->vm86
)
5788 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5790 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_WRITE
))
5792 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5793 gen_jmp_im(pc_start
- s
->cs_base
);
5799 if (!s
->pe
|| s
->vm86
)
5801 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5802 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5803 gen_op_set_cc_op(s
->cc_op
);
5808 s
->cc_op
= CC_OP_EFLAGS
;
5815 modrm
= ldub_code(s
->pc
++);
5816 mod
= (modrm
>> 6) & 3;
5817 op
= (modrm
>> 3) & 7;
5823 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_GDTR_READ
))
5825 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5826 gen_op_movl_T0_env(offsetof(CPUX86State
, gdt
.limit
));
5827 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5828 gen_add_A0_im(s
, 2);
5829 gen_op_movtl_T0_env(offsetof(CPUX86State
, gdt
.base
));
5831 gen_op_andl_T0_im(0xffffff);
5832 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5837 case 0: /* monitor */
5838 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5841 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_MONITOR
))
5843 gen_jmp_im(pc_start
- s
->cs_base
);
5844 #ifdef TARGET_X86_64
5845 if (s
->aflag
== 2) {
5846 gen_op_movq_A0_reg(R_EBX
);
5847 gen_op_addq_A0_AL();
5851 gen_op_movl_A0_reg(R_EBX
);
5852 gen_op_addl_A0_AL();
5854 gen_op_andl_A0_ffff();
5856 gen_add_A0_ds_seg(s
);
5860 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5863 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5864 gen_op_set_cc_op(s
->cc_op
);
5865 s
->cc_op
= CC_OP_DYNAMIC
;
5867 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_MWAIT
))
5869 gen_jmp_im(s
->pc
- s
->cs_base
);
5877 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IDTR_READ
))
5879 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5880 gen_op_movl_T0_env(offsetof(CPUX86State
, idt
.limit
));
5881 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5882 gen_add_A0_im(s
, 2);
5883 gen_op_movtl_T0_env(offsetof(CPUX86State
, idt
.base
));
5885 gen_op_andl_T0_im(0xffffff);
5886 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5894 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMRUN
))
5896 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5897 gen_op_set_cc_op(s
->cc_op
);
5898 gen_jmp_im(s
->pc
- s
->cs_base
);
5900 s
->cc_op
= CC_OP_EFLAGS
;
5903 case 1: /* VMMCALL */
5904 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMMCALL
))
5906 /* FIXME: cause #UD if hflags & SVM */
5909 case 2: /* VMLOAD */
5910 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMLOAD
))
5914 case 3: /* VMSAVE */
5915 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMSAVE
))
5920 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_STGI
))
5925 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_CLGI
))
5929 case 6: /* SKINIT */
5930 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SKINIT
))
5934 case 7: /* INVLPGA */
5935 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_INVLPGA
))
5942 } else if (s
->cpl
!= 0) {
5943 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5945 if (gen_svm_check_intercept(s
, pc_start
,
5946 op
==2 ? SVM_EXIT_GDTR_WRITE
: SVM_EXIT_IDTR_WRITE
))
5948 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5949 gen_op_ld_T1_A0(OT_WORD
+ s
->mem_index
);
5950 gen_add_A0_im(s
, 2);
5951 gen_op_ld_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5953 gen_op_andl_T0_im(0xffffff);
5955 gen_op_movtl_env_T0(offsetof(CPUX86State
,gdt
.base
));
5956 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
5958 gen_op_movtl_env_T0(offsetof(CPUX86State
,idt
.base
));
5959 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
5964 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_READ_CR0
))
5966 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
5967 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
5971 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5973 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_WRITE_CR0
))
5975 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5977 gen_jmp_im(s
->pc
- s
->cs_base
);
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                        break;
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_invlpg_A0();
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
                break;
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    gen_op_movslq_T0_T0();
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_TN_reg(ot, 0, rm);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS;
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_arpl_update();
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (b & 2) {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                    gen_op_mov_TN_reg(ot, 0, rm);
                    gen_op_movl_crN_T0(reg);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                    if (reg == 8)
                        gen_op_movtl_T0_cr8();
                    else
#endif
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                gen_op_movl_drN_T0(reg);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_op_clts();
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxsave_A0((s->dflag == 2));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxrstor_A0((s->dflag == 2));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
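        /* lfence/mfence/sfence generate no code here: TCG of this era runs
           guest memory accesses in program order on a single thread, so
           the fences only need to be decoded and validated, not emitted. */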
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        gen_op_rsm();
        gen_eob(s);
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
        /* fall through */
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
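/* CC_O ... CC_C are the individual EFLAGS status bits (overflow, sign,
   zero, adjust, parity, carry). The two masks name the groups the flag
   optimizer works with: full OSZAPC for most arithmetic, and OSZAP for
   inc/dec-style ops, which must preserve the carry. */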
/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
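/* A sketch of how these tables are consumed (see optimize_flags() below):
   opc_read_flags[op] records which of the six status bits an op consumes.
   E.g. [INDEX_op_jb_subb] = CC_C says that "jump if below after a byte sub"
   only needs the carry, so the sub feeding it need not compute the other
   five bits if nothing else reads them. */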
/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
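/* The _raw/_kernel/_user suffixes come from the memory-access variants of
   each micro-op (they line up with the mem_index values selected in
   gen_intermediate_code_internal() below); the DEF_* macros just repeat the
   same flag information for every variant. */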
/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
    cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);
}
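/* Any op without an explicit entry in opc_simpler[] keeps itself as its
   "simpler" form: the loop above maps every zero entry to the op's own
   index, so optimize_flags() can rewrite *opc_ptr unconditionally. */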
/* CPU flags computation optimization: we move backward thru the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags must be computed. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#ifdef DEBUG_DISAS
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
#endif

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif
    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
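/* The _pc variant (search_pc = 1) retranslates a block while recording, for
   every generated op, the guest PC and cc_op state in gen_opc_pc[] and
   gen_opc_cc_op[]; gen_pc_load() below uses those arrays to recover the
   precise guest state for a given position in the op stream. */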
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}