 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
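/* The decoder ORs these bits into DisasContext.prefix as it consumes prefix
   bytes, so e.g. a rep prefix (0xf3) leaves (s->prefix & PREFIX_REPZ) set
   for the opcode handler to test. */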
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1
#ifdef TARGET_X86_64
#define TCG_TYPE_TL TCG_TYPE_I64
#define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
#define tcg_gen_ld_tl tcg_gen_ld_i64
#define tcg_gen_st8_tl tcg_gen_st8_i64
#define tcg_gen_st16_tl tcg_gen_st16_i64
#define tcg_gen_st32_tl tcg_gen_st32_i64
#define tcg_gen_st_tl tcg_gen_st_i64
#define tcg_gen_add_tl tcg_gen_add_i64
#define tcg_gen_addi_tl tcg_gen_addi_i64
#define tcg_gen_sub_tl tcg_gen_sub_i64
#define tcg_gen_subi_tl tcg_gen_subi_i64
#define tcg_gen_and_tl tcg_gen_and_i64
#define tcg_gen_andi_tl tcg_gen_andi_i64
#define tcg_gen_or_tl tcg_gen_or_i64
#define tcg_gen_ori_tl tcg_gen_ori_i64
#define tcg_gen_xor_tl tcg_gen_xor_i64
#define tcg_gen_xori_tl tcg_gen_xori_i64
#define tcg_gen_shl_tl tcg_gen_shl_i64
#define tcg_gen_shli_tl tcg_gen_shli_i64
#define tcg_gen_shr_tl tcg_gen_shr_i64
#define tcg_gen_shri_tl tcg_gen_shri_i64
#define tcg_gen_sar_tl tcg_gen_sar_i64
#define tcg_gen_sari_tl tcg_gen_sari_i64
#else
#define TCG_TYPE_TL TCG_TYPE_I32
#define tcg_gen_movi_tl tcg_gen_movi_i32
#define tcg_gen_mov_tl tcg_gen_mov_i32
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
#define tcg_gen_ld_tl tcg_gen_ld_i32
#define tcg_gen_st8_tl tcg_gen_st8_i32
#define tcg_gen_st16_tl tcg_gen_st16_i32
#define tcg_gen_st32_tl tcg_gen_st_i32
#define tcg_gen_st_tl tcg_gen_st_i32
#define tcg_gen_add_tl tcg_gen_add_i32
#define tcg_gen_addi_tl tcg_gen_addi_i32
#define tcg_gen_sub_tl tcg_gen_sub_i32
#define tcg_gen_subi_tl tcg_gen_subi_i32
#define tcg_gen_and_tl tcg_gen_and_i32
#define tcg_gen_andi_tl tcg_gen_andi_i32
#define tcg_gen_or_tl tcg_gen_or_i32
#define tcg_gen_ori_tl tcg_gen_ori_i32
#define tcg_gen_xor_tl tcg_gen_xor_i32
#define tcg_gen_xori_tl tcg_gen_xori_i32
#define tcg_gen_shl_tl tcg_gen_shl_i32
#define tcg_gen_shli_tl tcg_gen_shli_i32
#define tcg_gen_shr_tl tcg_gen_shr_i32
#define tcg_gen_shri_tl tcg_gen_shri_i32
#define tcg_gen_sar_tl tcg_gen_sar_i32
#define tcg_gen_sari_tl tcg_gen_sari_i32
#endif
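/* The _tl ("target long") aliases above let the rest of this file emit TCG
   ops at the guest word size without per-site #ifdefs: they expand to the
   _i64 variants on 64-bit targets and to the _i32 variants otherwise. */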
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

/* i386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
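/* For reference, DEF_REGS(gen_op_cmovw_, _T1_T0) pastes each register name
   into the slot, expanding to gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0,
   ..., so the per-size function tables below are built mechanically. */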
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
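/* These offsets select a sub-word view of a target_ulong register slot in
   CPUState: REG_B/REG_H are the AL/AH-style low and high bytes, REG_W the
   low 16 bits, REG_L the low 32 bits and REG_LH the upper 32 bits. */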
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
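/* Example: ot == OT_BYTE with reg == 1 stores CL, while reg == 5 without a
   REX prefix stores CH via regs[5 - 4] + REG_H_OFFSET; on x86_64 any REX
   prefix (x86_64_hregs) forces the low-byte path so SPL/BPL/SIL/DIL work. */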
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};

#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
                   gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
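/* In the load/store helpers that follow, 'idx' packs two values: bits 0-1
   hold the operand size (OT_BYTE..OT_QUAD) and bits 2+ hold s->mem_index,
   so callers pass "ot + s->mem_index" and (idx >> 2) - 1 recovers the
   memory access index. */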
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
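/* Micro-ops that may raise an exception, like the check_io helpers above,
   require the condition codes and EIP to be synced to CPUState first; hence
   the gen_op_set_cc_op()/gen_jmp_im() pair before the call. */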
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
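/* For example, GEN_REPZ(movs) expands to a gen_repz_movs() helper: the
   translated code runs one iteration of the string op, decrements ECX and
   jumps back to cur_eip, so a REP insn becomes a loop of single iterations
   that can take interrupts and exceptions between steps. */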
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};

static GenOpFunc1 *gen_op_loop[3][4] = {
#ifdef TARGET_X86_64
#endif
};

static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};

static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update_inc_cc();
}

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {

        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0)
                    gen_op_addq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
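/* Worked example (32-bit mode): modrm 0x44 has mod=1, rm=4, so a SIB byte
   follows; sib 0x98 gives scale=2, index=3 (EBX), base=0 (EAX) and an 8-bit
   displacement, so the code above computes A0 = EAX + (EBX << 2) + disp8,
   plus the segment base when must_add_seg is set. */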
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;
        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }
        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    } else {
        override = R_DS;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}

/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}

static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
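/* When the destination stays on the same guest page as this TB (or the page
   of the insn's last byte), tcg_gen_goto_tb/exit_tb let the engine patch a
   direct jump to the next TB ("block chaining"); cross-page jumps return to
   the main loop so the page mapping can be revalidated. */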
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:
        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:
        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:
        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:
        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:
        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:
        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:
        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_jmp_im(val);
        gen_set_label(l1);
        gen_eob(s);
        gen_set_label(l2);
        gen_eob(s);
    }
}

static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:
    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:
    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:
    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:
    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)

static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now */
    }
#endif
    return 0;
}

static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}

static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
            break;
    }
    return 0;
}

static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
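/* Example path (32-bit code, ss32 && !addseg): A0 = ESP - 4, T0 is stored at
   A0, then ESP is reloaded from A0. With addseg set, the pre-segment offset
   is saved in T1 first so ESP is updated with the unsegmented value. */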
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
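/* For instance "enter $8, $0" here pushes EBP, loads EBP with the new frame
   address kept in T1, then lowers ESP by a further esp_addend (8) plus
   opsize * level bytes for the copied frame pointers. */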
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}

static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
#define SSE_SPECIAL ((GenOpFunc2 *)1)

#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
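/* sse_op_table1 is indexed by the opcode byte and the mandatory prefix:
   column 0 = no prefix, 1 = 0x66 (PREFIX_DATA), 2 = 0xf3 (PREFIX_REPZ),
   3 = 0xf2 (PREFIX_REPNZ). SSE_SPECIAL marks entries decoded by hand in
   gen_sse() below. */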
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};

static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};

static GenOpFunc2 *sse_op_table4[8][4] = {
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
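        /* Case values below are b | (b1 << 8): e.g. 0x210 is opcode
           0x0f 0x10 with an 0xf3 prefix (movss) and 0x310 the same opcode
           with 0xf2 (movsd). */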
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntps */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs
[rm
].XMM_Q(0)),
2942 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2945 case 0x013: /* movlps */
2946 case 0x113: /* movlpd */
2948 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2949 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2954 case 0x017: /* movhps */
2955 case 0x117: /* movhpd */
2957 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2958 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2963 case 0x71: /* shift mm, im */
2966 case 0x171: /* shift xmm, im */
2969 val
= ldub_code(s
->pc
++);
2971 gen_op_movl_T0_im(val
);
2972 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2974 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2975 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2977 gen_op_movl_T0_im(val
);
2978 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2980 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2981 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2983 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2987 rm
= (modrm
& 7) | REX_B(s
);
2988 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2991 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2993 sse_op2(op2_offset
, op1_offset
);
2995 case 0x050: /* movmskps */
2996 rm
= (modrm
& 7) | REX_B(s
);
2997 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[rm
]));
2998 gen_op_mov_reg_T0(OT_LONG
, reg
);
3000 case 0x150: /* movmskpd */
3001 rm
= (modrm
& 7) | REX_B(s
);
3002 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[rm
]));
3003 gen_op_mov_reg_T0(OT_LONG
, reg
);
3005 case 0x02a: /* cvtpi2ps */
3006 case 0x12a: /* cvtpi2pd */
3009 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3010 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3011 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
3014 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3016 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3019 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
3023 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
3027 case 0x22a: /* cvtsi2ss */
3028 case 0x32a: /* cvtsi2sd */
3029 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3030 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3031 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3032 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
3034 case 0x02c: /* cvttps2pi */
3035 case 0x12c: /* cvttpd2pi */
3036 case 0x02d: /* cvtps2pi */
3037 case 0x12d: /* cvtpd2pi */
3040 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3041 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3042 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
3044 rm
= (modrm
& 7) | REX_B(s
);
3045 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3047 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
3050 gen_op_cvttps2pi(op1_offset
, op2_offset
);
3053 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
3056 gen_op_cvtps2pi(op1_offset
, op2_offset
);
3059 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
3063 case 0x22c: /* cvttss2si */
3064 case 0x32c: /* cvttsd2si */
3065 case 0x22d: /* cvtss2si */
3066 case 0x32d: /* cvtsd2si */
3067 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3069 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3071 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_Q(0)));
3073 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3074 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3076 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3078 rm
= (modrm
& 7) | REX_B(s
);
3079 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3081 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
3082 (b
& 1) * 4](op2_offset
);
3083 gen_op_mov_reg_T0(ot
, reg
);
3085 case 0xc4: /* pinsrw */
3088 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3089 val
= ldub_code(s
->pc
++);
3092 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
3095 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
3098 case 0xc5: /* pextrw */
3102 val
= ldub_code(s
->pc
++);
3105 rm
= (modrm
& 7) | REX_B(s
);
3106 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
3110 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
3112 reg
= ((modrm
>> 3) & 7) | rex_r
;
3113 gen_op_mov_reg_T0(OT_LONG
, reg
);
3115 case 0x1d6: /* movq ea, xmm */
3117 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3118 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3120 rm
= (modrm
& 7) | REX_B(s
);
3121 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
3122 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3123 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
3126 case 0x2d6: /* movq2dq */
3129 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3130 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3131 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3133 case 0x3d6: /* movdq2q */
3135 rm
= (modrm
& 7) | REX_B(s
);
3136 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
),
3137 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3139 case 0xd7: /* pmovmskb */
3144 rm
= (modrm
& 7) | REX_B(s
);
3145 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
3148 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3150 reg
= ((modrm
>> 3) & 7) | rex_r
;
3151 gen_op_mov_reg_T0(OT_LONG
, reg
);
3157 /* generic MMX or SSE operation */
3160 /* maskmov : we must prepare A0 */
3163 #ifdef TARGET_X86_64
3164 if (s
->aflag
== 2) {
3165 gen_op_movq_A0_reg(R_EDI
);
3169 gen_op_movl_A0_reg(R_EDI
);
3171 gen_op_andl_A0_ffff();
3173 gen_add_A0_ds_seg(s
);
3175 case 0x70: /* pshufx insn */
3176 case 0xc6: /* pshufx insn */
3177 case 0xc2: /* compare insns */
3184 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3186 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3187 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3188 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f && b
!= 0x5b) ||
3190 /* specific case for SSE single instructions */
3193 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3194 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3197 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
3200 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
3203 rm
= (modrm
& 7) | REX_B(s
);
3204 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3207 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3209 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3210 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3211 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
3214 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3218 case 0x70: /* pshufx insn */
3219 case 0xc6: /* pshufx insn */
3220 val
= ldub_code(s
->pc
++);
3221 sse_op3
= (GenOpFunc3
*)sse_op2
;
3222 sse_op3(op1_offset
, op2_offset
, val
);
3226 val
= ldub_code(s
->pc
++);
3229 sse_op2
= sse_op_table4
[val
][b1
];
3230 sse_op2(op1_offset
, op2_offset
);
3233 sse_op2(op1_offset
, op2_offset
);
3236 if (b
== 0x2e || b
== 0x2f) {
3237 s
->cc_op
= CC_OP_EFLAGS
;
3243 /* convert one instruction. s->is_jmp is set if the translation must
3244 be stopped. Return the next pc value */
3245 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
3247 int b
, prefixes
, aflag
, dflag
;
3249 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
3250 target_ulong next_eip
, tval
;
3260 #ifdef TARGET_X86_64
3265 s
->rip_offset
= 0; /* for relative ip address */
3267 b
= ldub_code(s
->pc
);
3269 /* check prefixes */
3270 #ifdef TARGET_X86_64
3274 prefixes
|= PREFIX_REPZ
;
3277 prefixes
|= PREFIX_REPNZ
;
3280 prefixes
|= PREFIX_LOCK
;
3301 prefixes
|= PREFIX_DATA
;
3304 prefixes
|= PREFIX_ADR
;
3308 rex_w
= (b
>> 3) & 1;
3309 rex_r
= (b
& 0x4) << 1;
3310 s
->rex_x
= (b
& 0x2) << 2;
3311 REX_B(s
) = (b
& 0x1) << 3;
3312 x86_64_hregs
= 1; /* select uniform byte register addressing */
3316 /* 0x66 is ignored if rex.w is set */
3319 if (prefixes
& PREFIX_DATA
)
3322 if (!(prefixes
& PREFIX_ADR
))
3329 prefixes
|= PREFIX_REPZ
;
3332 prefixes
|= PREFIX_REPNZ
;
3335 prefixes
|= PREFIX_LOCK
;
3356 prefixes
|= PREFIX_DATA
;
3359 prefixes
|= PREFIX_ADR
;
3362 if (prefixes
& PREFIX_DATA
)
3364 if (prefixes
& PREFIX_ADR
)
3368 s
->prefix
= prefixes
;
3372 /* lock generation */
3373 if (prefixes
& PREFIX_LOCK
)
3376 /* now check op code */
3380 /**************************/
3381 /* extended op code */
3382 b
= ldub_code(s
->pc
++) | 0x100;
3385 /**************************/
3403 ot
= dflag
+ OT_WORD
;
3406 case 0: /* OP Ev, Gv */
3407 modrm
= ldub_code(s
->pc
++);
3408 reg
= ((modrm
>> 3) & 7) | rex_r
;
3409 mod
= (modrm
>> 6) & 3;
3410 rm
= (modrm
& 7) | REX_B(s
);
3412 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3414 } else if (op
== OP_XORL
&& rm
== reg
) {
3416 /* xor reg, reg optimisation */
3418 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3419 gen_op_mov_reg_T0(ot
, reg
);
3420 gen_op_update1_cc();
3425 gen_op_mov_TN_reg(ot
, 1, reg
);
3426 gen_op(s
, op
, ot
, opreg
);
3428 case 1: /* OP Gv, Ev */
3429 modrm
= ldub_code(s
->pc
++);
3430 mod
= (modrm
>> 6) & 3;
3431 reg
= ((modrm
>> 3) & 7) | rex_r
;
3432 rm
= (modrm
& 7) | REX_B(s
);
3434 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3435 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3436 } else if (op
== OP_XORL
&& rm
== reg
) {
3439 gen_op_mov_TN_reg(ot
, 1, rm
);
3441 gen_op(s
, op
, ot
, reg
);
3443 case 2: /* OP A, Iv */
3444 val
= insn_get(s
, ot
);
3445 gen_op_movl_T1_im(val
);
3446 gen_op(s
, op
, ot
, OR_EAX
);
3452 case 0x80: /* GRP1 */
3462 ot
= dflag
+ OT_WORD
;
3464 modrm
= ldub_code(s
->pc
++);
3465 mod
= (modrm
>> 6) & 3;
3466 rm
= (modrm
& 7) | REX_B(s
);
3467 op
= (modrm
>> 3) & 7;
3473 s
->rip_offset
= insn_const_size(ot
);
3474 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3485 val
= insn_get(s
, ot
);
3488 val
= (int8_t)insn_get(s
, OT_BYTE
);
3491 gen_op_movl_T1_im(val
);
3492 gen_op(s
, op
, ot
, opreg
);
3496 /**************************/
3497 /* inc, dec, and other misc arith */
3498 case 0x40 ... 0x47: /* inc Gv */
3499 ot
= dflag
? OT_LONG
: OT_WORD
;
3500 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3502 case 0x48 ... 0x4f: /* dec Gv */
3503 ot
= dflag
? OT_LONG
: OT_WORD
;
3504 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3506 case 0xf6: /* GRP3 */
3511 ot
= dflag
+ OT_WORD
;
3513 modrm
= ldub_code(s
->pc
++);
3514 mod
= (modrm
>> 6) & 3;
3515 rm
= (modrm
& 7) | REX_B(s
);
3516 op
= (modrm
>> 3) & 7;
3519 s
->rip_offset
= insn_const_size(ot
);
3520 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3521 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3523 gen_op_mov_TN_reg(ot
, 0, rm
);
3528 val
= insn_get(s
, ot
);
3529 gen_op_movl_T1_im(val
);
3530 gen_op_testl_T0_T1_cc();
3531 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3536 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3538 gen_op_mov_reg_T0(ot
, rm
);
3544 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3546 gen_op_mov_reg_T0(ot
, rm
);
3548 gen_op_update_neg_cc();
3549 s
->cc_op
= CC_OP_SUBB
+ ot
;
3554 gen_op_mulb_AL_T0();
3555 s
->cc_op
= CC_OP_MULB
;
3558 gen_op_mulw_AX_T0();
3559 s
->cc_op
= CC_OP_MULW
;
3563 gen_op_mull_EAX_T0();
3564 s
->cc_op
= CC_OP_MULL
;
3566 #ifdef TARGET_X86_64
3568 gen_op_mulq_EAX_T0();
3569 s
->cc_op
= CC_OP_MULQ
;
3577 gen_op_imulb_AL_T0();
3578 s
->cc_op
= CC_OP_MULB
;
3581 gen_op_imulw_AX_T0();
3582 s
->cc_op
= CC_OP_MULW
;
3586 gen_op_imull_EAX_T0();
3587 s
->cc_op
= CC_OP_MULL
;
3589 #ifdef TARGET_X86_64
3591 gen_op_imulq_EAX_T0();
3592 s
->cc_op
= CC_OP_MULQ
;
3600 gen_jmp_im(pc_start
- s
->cs_base
);
3601 gen_op_divb_AL_T0();
3604 gen_jmp_im(pc_start
- s
->cs_base
);
3605 gen_op_divw_AX_T0();
3609 gen_jmp_im(pc_start
- s
->cs_base
);
3611 /* XXX: this is just a test */
3612 tcg_gen_macro_2(cpu_T
[0], cpu_T
[0], MACRO_TEST
);
3614 tcg_gen_helper_0_1(helper_divl_EAX_T0
, cpu_T
[0]);
3617 #ifdef TARGET_X86_64
3619 gen_jmp_im(pc_start
- s
->cs_base
);
3620 gen_op_divq_EAX_T0();
3628 gen_jmp_im(pc_start
- s
->cs_base
);
3629 gen_op_idivb_AL_T0();
3632 gen_jmp_im(pc_start
- s
->cs_base
);
3633 gen_op_idivw_AX_T0();
3637 gen_jmp_im(pc_start
- s
->cs_base
);
3638 tcg_gen_helper_0_1(helper_idivl_EAX_T0
, cpu_T
[0]);
3640 #ifdef TARGET_X86_64
3642 gen_jmp_im(pc_start
- s
->cs_base
);
3643 gen_op_idivq_EAX_T0();
3653 case 0xfe: /* GRP4 */
3654 case 0xff: /* GRP5 */
3658 ot
= dflag
+ OT_WORD
;
3660 modrm
= ldub_code(s
->pc
++);
3661 mod
= (modrm
>> 6) & 3;
3662 rm
= (modrm
& 7) | REX_B(s
);
3663 op
= (modrm
>> 3) & 7;
3664 if (op
>= 2 && b
== 0xfe) {
3668 if (op
== 2 || op
== 4) {
3669 /* operand size for jumps is 64 bit */
3671 } else if (op
== 3 || op
== 5) {
3672 /* for call calls, the operand is 16 or 32 bit, even
3674 ot
= dflag
? OT_LONG
: OT_WORD
;
3675 } else if (op
== 6) {
3676 /* default push size is 64 bit */
3677 ot
= dflag
? OT_QUAD
: OT_WORD
;
3681 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3682 if (op
>= 2 && op
!= 3 && op
!= 5)
3683 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3685 gen_op_mov_TN_reg(ot
, 0, rm
);
3689 case 0: /* inc Ev */
3694 gen_inc(s
, ot
, opreg
, 1);
3696 case 1: /* dec Ev */
3701 gen_inc(s
, ot
, opreg
, -1);
3703 case 2: /* call Ev */
3704 /* XXX: optimize if memory (no 'and' is necessary) */
3706 gen_op_andl_T0_ffff();
3707 next_eip
= s
->pc
- s
->cs_base
;
3708 gen_movtl_T1_im(next_eip
);
3713 case 3: /* lcall Ev */
3714 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3715 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3716 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3718 if (s
->pe
&& !s
->vm86
) {
3719 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3720 gen_op_set_cc_op(s
->cc_op
);
3721 gen_jmp_im(pc_start
- s
->cs_base
);
3722 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- pc_start
);
3724 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3728 case 4: /* jmp Ev */
3730 gen_op_andl_T0_ffff();
3734 case 5: /* ljmp Ev */
3735 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3736 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3737 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3739 if (s
->pe
&& !s
->vm86
) {
3740 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3741 gen_op_set_cc_op(s
->cc_op
);
3742 gen_jmp_im(pc_start
- s
->cs_base
);
3743 gen_op_ljmp_protected_T0_T1(s
->pc
- pc_start
);
3745 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3746 gen_op_movl_T0_T1();
3751 case 6: /* push Ev */
3759 case 0x84: /* test Ev, Gv */
3764 ot
= dflag
+ OT_WORD
;
3766 modrm
= ldub_code(s
->pc
++);
3767 mod
= (modrm
>> 6) & 3;
3768 rm
= (modrm
& 7) | REX_B(s
);
3769 reg
= ((modrm
>> 3) & 7) | rex_r
;
3771 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3772 gen_op_mov_TN_reg(ot
, 1, reg
);
3773 gen_op_testl_T0_T1_cc();
3774 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3777 case 0xa8: /* test eAX, Iv */
3782 ot
= dflag
+ OT_WORD
;
3783 val
= insn_get(s
, ot
);
3785 gen_op_mov_TN_reg(ot
, 0, OR_EAX
);
3786 gen_op_movl_T1_im(val
);
3787 gen_op_testl_T0_T1_cc();
3788 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3791 case 0x98: /* CWDE/CBW */
3792 #ifdef TARGET_X86_64
3794 gen_op_movslq_RAX_EAX();
3798 gen_op_movswl_EAX_AX();
3800 gen_op_movsbw_AX_AL();
3802 case 0x99: /* CDQ/CWD */
3803 #ifdef TARGET_X86_64
3805 gen_op_movsqo_RDX_RAX();
3809 gen_op_movslq_EDX_EAX();
3811 gen_op_movswl_DX_AX();
3813 case 0x1af: /* imul Gv, Ev */
3814 case 0x69: /* imul Gv, Ev, I */
3816 ot
= dflag
+ OT_WORD
;
3817 modrm
= ldub_code(s
->pc
++);
3818 reg
= ((modrm
>> 3) & 7) | rex_r
;
3820 s
->rip_offset
= insn_const_size(ot
);
3823 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3825 val
= insn_get(s
, ot
);
3826 gen_op_movl_T1_im(val
);
3827 } else if (b
== 0x6b) {
3828 val
= (int8_t)insn_get(s
, OT_BYTE
);
3829 gen_op_movl_T1_im(val
);
3831 gen_op_mov_TN_reg(ot
, 1, reg
);
3834 #ifdef TARGET_X86_64
3835 if (ot
== OT_QUAD
) {
3836 gen_op_imulq_T0_T1();
3839 if (ot
== OT_LONG
) {
3840 gen_op_imull_T0_T1();
3842 gen_op_imulw_T0_T1();
3844 gen_op_mov_reg_T0(ot
, reg
);
3845 s
->cc_op
= CC_OP_MULB
+ ot
;
3848 case 0x1c1: /* xadd Ev, Gv */
3852 ot
= dflag
+ OT_WORD
;
3853 modrm
= ldub_code(s
->pc
++);
3854 reg
= ((modrm
>> 3) & 7) | rex_r
;
3855 mod
= (modrm
>> 6) & 3;
3857 rm
= (modrm
& 7) | REX_B(s
);
3858 gen_op_mov_TN_reg(ot
, 0, reg
);
3859 gen_op_mov_TN_reg(ot
, 1, rm
);
3860 gen_op_addl_T0_T1();
3861 gen_op_mov_reg_T1(ot
, reg
);
3862 gen_op_mov_reg_T0(ot
, rm
);
3864 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3865 gen_op_mov_TN_reg(ot
, 0, reg
);
3866 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3867 gen_op_addl_T0_T1();
3868 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3869 gen_op_mov_reg_T1(ot
, reg
);
3871 gen_op_update2_cc();
3872 s
->cc_op
= CC_OP_ADDB
+ ot
;
3875 case 0x1b1: /* cmpxchg Ev, Gv */
3879 ot
= dflag
+ OT_WORD
;
3880 modrm
= ldub_code(s
->pc
++);
3881 reg
= ((modrm
>> 3) & 7) | rex_r
;
3882 mod
= (modrm
>> 6) & 3;
3883 gen_op_mov_TN_reg(ot
, 1, reg
);
3885 rm
= (modrm
& 7) | REX_B(s
);
3886 gen_op_mov_TN_reg(ot
, 0, rm
);
3887 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3888 gen_op_mov_reg_T0(ot
, rm
);
3890 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3891 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3892 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3894 s
->cc_op
= CC_OP_SUBB
+ ot
;
3896 case 0x1c7: /* cmpxchg8b */
3897 modrm
= ldub_code(s
->pc
++);
3898 mod
= (modrm
>> 6) & 3;
3899 if ((mod
== 3) || ((modrm
& 0x38) != 0x8))
3901 gen_jmp_im(pc_start
- s
->cs_base
);
3902 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3903 gen_op_set_cc_op(s
->cc_op
);
3904 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3906 s
->cc_op
= CC_OP_EFLAGS
;
3909 /**************************/
3911 case 0x50 ... 0x57: /* push */
3912 gen_op_mov_TN_reg(OT_LONG
, 0, (b
& 7) | REX_B(s
));
3915 case 0x58 ... 0x5f: /* pop */
3917 ot
= dflag
? OT_QUAD
: OT_WORD
;
3919 ot
= dflag
+ OT_WORD
;
3922 /* NOTE: order is important for pop %sp */
3924 gen_op_mov_reg_T0(ot
, (b
& 7) | REX_B(s
));
3926 case 0x60: /* pusha */
3931 case 0x61: /* popa */
3936 case 0x68: /* push Iv */
3939 ot
= dflag
? OT_QUAD
: OT_WORD
;
3941 ot
= dflag
+ OT_WORD
;
3944 val
= insn_get(s
, ot
);
3946 val
= (int8_t)insn_get(s
, OT_BYTE
);
3947 gen_op_movl_T0_im(val
);
3950 case 0x8f: /* pop Ev */
3952 ot
= dflag
? OT_QUAD
: OT_WORD
;
3954 ot
= dflag
+ OT_WORD
;
3956 modrm
= ldub_code(s
->pc
++);
3957 mod
= (modrm
>> 6) & 3;
3960 /* NOTE: order is important for pop %sp */
3962 rm
= (modrm
& 7) | REX_B(s
);
3963 gen_op_mov_reg_T0(ot
, rm
);
3965 /* NOTE: order is important too for MMU exceptions */
3966 s
->popl_esp_hack
= 1 << ot
;
3967 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3968 s
->popl_esp_hack
= 0;
3972 case 0xc8: /* enter */
3975 val
= lduw_code(s
->pc
);
3977 level
= ldub_code(s
->pc
++);
3978 gen_enter(s
, val
, level
);
3981 case 0xc9: /* leave */
3982 /* XXX: exception not precise (ESP is updated before potential exception) */
3984 gen_op_mov_TN_reg(OT_QUAD
, 0, R_EBP
);
3985 gen_op_mov_reg_T0(OT_QUAD
, R_ESP
);
3986 } else if (s
->ss32
) {
3987 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
3988 gen_op_mov_reg_T0(OT_LONG
, R_ESP
);
3990 gen_op_mov_TN_reg(OT_WORD
, 0, R_EBP
);
3991 gen_op_mov_reg_T0(OT_WORD
, R_ESP
);
3995 ot
= dflag
? OT_QUAD
: OT_WORD
;
3997 ot
= dflag
+ OT_WORD
;
3999 gen_op_mov_reg_T0(ot
, R_EBP
);
4002 case 0x06: /* push es */
4003 case 0x0e: /* push cs */
4004 case 0x16: /* push ss */
4005 case 0x1e: /* push ds */
4008 gen_op_movl_T0_seg(b
>> 3);
4011 case 0x1a0: /* push fs */
4012 case 0x1a8: /* push gs */
4013 gen_op_movl_T0_seg((b
>> 3) & 7);
4016 case 0x07: /* pop es */
4017 case 0x17: /* pop ss */
4018 case 0x1f: /* pop ds */
4023 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
4026 /* if reg == SS, inhibit interrupts/trace. */
4027 /* If several instructions disable interrupts, only the
4029 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
4030 gen_op_set_inhibit_irq();
4034 gen_jmp_im(s
->pc
- s
->cs_base
);
4038 case 0x1a1: /* pop fs */
4039 case 0x1a9: /* pop gs */
4041 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
4044 gen_jmp_im(s
->pc
- s
->cs_base
);
4049 /**************************/
4052 case 0x89: /* mov Gv, Ev */
4056 ot
= dflag
+ OT_WORD
;
4057 modrm
= ldub_code(s
->pc
++);
4058 reg
= ((modrm
>> 3) & 7) | rex_r
;
4060 /* generate a generic store */
4061 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
4064 case 0xc7: /* mov Ev, Iv */
4068 ot
= dflag
+ OT_WORD
;
4069 modrm
= ldub_code(s
->pc
++);
4070 mod
= (modrm
>> 6) & 3;
4072 s
->rip_offset
= insn_const_size(ot
);
4073 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4075 val
= insn_get(s
, ot
);
4076 gen_op_movl_T0_im(val
);
4078 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4080 gen_op_mov_reg_T0(ot
, (modrm
& 7) | REX_B(s
));
4083 case 0x8b: /* mov Ev, Gv */
4087 ot
= OT_WORD
+ dflag
;
4088 modrm
= ldub_code(s
->pc
++);
4089 reg
= ((modrm
>> 3) & 7) | rex_r
;
4091 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4092 gen_op_mov_reg_T0(ot
, reg
);
4094 case 0x8e: /* mov seg, Gv */
4095 modrm
= ldub_code(s
->pc
++);
4096 reg
= (modrm
>> 3) & 7;
4097 if (reg
>= 6 || reg
== R_CS
)
4099 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
4100 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
4102 /* if reg == SS, inhibit interrupts/trace */
4103 /* If several instructions disable interrupts, only the
4105 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
4106 gen_op_set_inhibit_irq();
4110 gen_jmp_im(s
->pc
- s
->cs_base
);
4114 case 0x8c: /* mov Gv, seg */
4115 modrm
= ldub_code(s
->pc
++);
4116 reg
= (modrm
>> 3) & 7;
4117 mod
= (modrm
>> 6) & 3;
4120 gen_op_movl_T0_seg(reg
);
4122 ot
= OT_WORD
+ dflag
;
4125 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
4128 case 0x1b6: /* movzbS Gv, Eb */
4129 case 0x1b7: /* movzwS Gv, Eb */
4130 case 0x1be: /* movsbS Gv, Eb */
4131 case 0x1bf: /* movswS Gv, Eb */
4134 /* d_ot is the size of destination */
4135 d_ot
= dflag
+ OT_WORD
;
4136 /* ot is the size of source */
4137 ot
= (b
& 1) + OT_BYTE
;
4138 modrm
= ldub_code(s
->pc
++);
4139 reg
= ((modrm
>> 3) & 7) | rex_r
;
4140 mod
= (modrm
>> 6) & 3;
4141 rm
= (modrm
& 7) | REX_B(s
);
4144 gen_op_mov_TN_reg(ot
, 0, rm
);
4145 switch(ot
| (b
& 8)) {
4147 gen_op_movzbl_T0_T0();
4150 gen_op_movsbl_T0_T0();
4153 gen_op_movzwl_T0_T0();
4157 gen_op_movswl_T0_T0();
4160 gen_op_mov_reg_T0(d_ot
, reg
);
4162 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4164 gen_op_lds_T0_A0(ot
+ s
->mem_index
);
4166 gen_op_ldu_T0_A0(ot
+ s
->mem_index
);
4168 gen_op_mov_reg_T0(d_ot
, reg
);
4173 case 0x8d: /* lea */
4174 ot
= dflag
+ OT_WORD
;
4175 modrm
= ldub_code(s
->pc
++);
4176 mod
= (modrm
>> 6) & 3;
4179 reg
= ((modrm
>> 3) & 7) | rex_r
;
4180 /* we must ensure that no segment is added */
4184 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4186 gen_op_mov_reg_A0(ot
- OT_WORD
, reg
);
4189 case 0xa0: /* mov EAX, Ov */
4191 case 0xa2: /* mov Ov, EAX */
4194 target_ulong offset_addr
;
4199 ot
= dflag
+ OT_WORD
;
4200 #ifdef TARGET_X86_64
4201 if (s
->aflag
== 2) {
4202 offset_addr
= ldq_code(s
->pc
);
4204 gen_op_movq_A0_im(offset_addr
);
4209 offset_addr
= insn_get(s
, OT_LONG
);
4211 offset_addr
= insn_get(s
, OT_WORD
);
4213 gen_op_movl_A0_im(offset_addr
);
4215 gen_add_A0_ds_seg(s
);
4217 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4218 gen_op_mov_reg_T0(ot
, R_EAX
);
4220 gen_op_mov_TN_reg(ot
, 0, R_EAX
);
4221 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4225 case 0xd7: /* xlat */
4226 #ifdef TARGET_X86_64
4227 if (s
->aflag
== 2) {
4228 gen_op_movq_A0_reg(R_EBX
);
4229 gen_op_addq_A0_AL();
4233 gen_op_movl_A0_reg(R_EBX
);
4234 gen_op_addl_A0_AL();
4236 gen_op_andl_A0_ffff();
4238 gen_add_A0_ds_seg(s
);
4239 gen_op_ldu_T0_A0(OT_BYTE
+ s
->mem_index
);
4240 gen_op_mov_reg_T0(OT_BYTE
, R_EAX
);
4242 case 0xb0 ... 0xb7: /* mov R, Ib */
4243 val
= insn_get(s
, OT_BYTE
);
4244 gen_op_movl_T0_im(val
);
4245 gen_op_mov_reg_T0(OT_BYTE
, (b
& 7) | REX_B(s
));
4247 case 0xb8 ... 0xbf: /* mov R, Iv */
4248 #ifdef TARGET_X86_64
4252 tmp
= ldq_code(s
->pc
);
4254 reg
= (b
& 7) | REX_B(s
);
4255 gen_movtl_T0_im(tmp
);
4256 gen_op_mov_reg_T0(OT_QUAD
, reg
);
4260 ot
= dflag
? OT_LONG
: OT_WORD
;
4261 val
= insn_get(s
, ot
);
4262 reg
= (b
& 7) | REX_B(s
);
4263 gen_op_movl_T0_im(val
);
4264 gen_op_mov_reg_T0(ot
, reg
);
4268 case 0x91 ... 0x97: /* xchg R, EAX */
4269 ot
= dflag
+ OT_WORD
;
4270 reg
= (b
& 7) | REX_B(s
);
4274 case 0x87: /* xchg Ev, Gv */
4278 ot
= dflag
+ OT_WORD
;
4279 modrm
= ldub_code(s
->pc
++);
4280 reg
= ((modrm
>> 3) & 7) | rex_r
;
4281 mod
= (modrm
>> 6) & 3;
4283 rm
= (modrm
& 7) | REX_B(s
);
4285 gen_op_mov_TN_reg(ot
, 0, reg
);
4286 gen_op_mov_TN_reg(ot
, 1, rm
);
4287 gen_op_mov_reg_T0(ot
, rm
);
4288 gen_op_mov_reg_T1(ot
, reg
);
4290 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4291 gen_op_mov_TN_reg(ot
, 0, reg
);
4292 /* for xchg, lock is implicit */
4293 if (!(prefixes
& PREFIX_LOCK
))
4295 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4296 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4297 if (!(prefixes
& PREFIX_LOCK
))
4299 gen_op_mov_reg_T1(ot
, reg
);
4302 case 0xc4: /* les Gv */
4307 case 0xc5: /* lds Gv */
4312 case 0x1b2: /* lss Gv */
4315 case 0x1b4: /* lfs Gv */
4318 case 0x1b5: /* lgs Gv */
4321 ot
= dflag
? OT_LONG
: OT_WORD
;
4322 modrm
= ldub_code(s
->pc
++);
4323 reg
= ((modrm
>> 3) & 7) | rex_r
;
4324 mod
= (modrm
>> 6) & 3;
4327 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4328 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4329 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4330 /* load the segment first to handle exceptions properly */
4331 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
4332 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4333 /* then put the data */
4334 gen_op_mov_reg_T1(ot
, reg
);
4336 gen_jmp_im(s
->pc
- s
->cs_base
);
4341 /************************/
4352 ot
= dflag
+ OT_WORD
;
4354 modrm
= ldub_code(s
->pc
++);
4355 mod
= (modrm
>> 6) & 3;
4356 op
= (modrm
>> 3) & 7;
4362 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4365 opreg
= (modrm
& 7) | REX_B(s
);
4370 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4373 shift
= ldub_code(s
->pc
++);
4375 gen_shifti(s
, op
, ot
, opreg
, shift
);
4390 case 0x1a4: /* shld imm */
4394 case 0x1a5: /* shld cl */
4398 case 0x1ac: /* shrd imm */
4402 case 0x1ad: /* shrd cl */
4406 ot
= dflag
+ OT_WORD
;
4407 modrm
= ldub_code(s
->pc
++);
4408 mod
= (modrm
>> 6) & 3;
4409 rm
= (modrm
& 7) | REX_B(s
);
4410 reg
= ((modrm
>> 3) & 7) | rex_r
;
4413 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4414 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4416 gen_op_mov_TN_reg(ot
, 0, rm
);
4418 gen_op_mov_TN_reg(ot
, 1, reg
);
4421 val
= ldub_code(s
->pc
++);
4428 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4430 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4431 if (op
== 0 && ot
!= OT_WORD
)
4432 s
->cc_op
= CC_OP_SHLB
+ ot
;
4434 s
->cc_op
= CC_OP_SARB
+ ot
;
4437 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4438 gen_op_set_cc_op(s
->cc_op
);
4440 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4442 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4443 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4446 gen_op_mov_reg_T0(ot
, rm
);
4450 /************************/
4453 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4454 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4455 /* XXX: what to do if illegal op ? */
4456 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4459 modrm
= ldub_code(s
->pc
++);
4460 mod
= (modrm
>> 6) & 3;
4462 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4465 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4467 case 0x00 ... 0x07: /* fxxxs */
4468 case 0x10 ... 0x17: /* fixxxl */
4469 case 0x20 ... 0x27: /* fxxxl */
4470 case 0x30 ... 0x37: /* fixxx */
4477 gen_op_flds_FT0_A0();
4480 gen_op_fildl_FT0_A0();
4483 gen_op_fldl_FT0_A0();
4487 gen_op_fild_FT0_A0();
4491 gen_op_fp_arith_ST0_FT0
[op1
]();
4493 /* fcomp needs pop */
4498 case 0x08: /* flds */
4499 case 0x0a: /* fsts */
4500 case 0x0b: /* fstps */
4501 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4502 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4503 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4508 gen_op_flds_ST0_A0();
4511 gen_op_fildl_ST0_A0();
4514 gen_op_fldl_ST0_A0();
4518 gen_op_fild_ST0_A0();
4525 gen_op_fisttl_ST0_A0();
4528 gen_op_fisttll_ST0_A0();
4532 gen_op_fistt_ST0_A0();
4539 gen_op_fsts_ST0_A0();
4542 gen_op_fistl_ST0_A0();
4545 gen_op_fstl_ST0_A0();
4549 gen_op_fist_ST0_A0();
4557 case 0x0c: /* fldenv mem */
4558 gen_op_fldenv_A0(s
->dflag
);
4560 case 0x0d: /* fldcw mem */
4563 case 0x0e: /* fnstenv mem */
4564 gen_op_fnstenv_A0(s
->dflag
);
4566 case 0x0f: /* fnstcw mem */
4569 case 0x1d: /* fldt mem */
4570 gen_op_fldt_ST0_A0();
4572 case 0x1f: /* fstpt mem */
4573 gen_op_fstt_ST0_A0();
4576 case 0x2c: /* frstor mem */
4577 gen_op_frstor_A0(s
->dflag
);
4579 case 0x2e: /* fnsave mem */
4580 gen_op_fnsave_A0(s
->dflag
);
4582 case 0x2f: /* fnstsw mem */
4585 case 0x3c: /* fbld */
4586 gen_op_fbld_ST0_A0();
4588 case 0x3e: /* fbstp */
4589 gen_op_fbst_ST0_A0();
4592 case 0x3d: /* fildll */
4593 gen_op_fildll_ST0_A0();
4595 case 0x3f: /* fistpll */
4596 gen_op_fistll_ST0_A0();
4603 /* register float ops */
4607 case 0x08: /* fld sti */
4609 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4611 case 0x09: /* fxchg sti */
4612 case 0x29: /* fxchg4 sti, undocumented op */
4613 case 0x39: /* fxchg7 sti, undocumented op */
4614 gen_op_fxchg_ST0_STN(opreg
);
4616 case 0x0a: /* grp d9/2 */
4619 /* check exceptions (FreeBSD FPU probe) */
4620 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4621 gen_op_set_cc_op(s
->cc_op
);
4622 gen_jmp_im(pc_start
- s
->cs_base
);
4629 case 0x0c: /* grp d9/4 */
4639 gen_op_fcom_ST0_FT0();
4648 case 0x0d: /* grp d9/5 */
4657 gen_op_fldl2t_ST0();
4661 gen_op_fldl2e_ST0();
4669 gen_op_fldlg2_ST0();
4673 gen_op_fldln2_ST0();
4684 case 0x0e: /* grp d9/6 */
4695 case 3: /* fpatan */
4698 case 4: /* fxtract */
4701 case 5: /* fprem1 */
4704 case 6: /* fdecstp */
4708 case 7: /* fincstp */
4713 case 0x0f: /* grp d9/7 */
4718 case 1: /* fyl2xp1 */
4724 case 3: /* fsincos */
4727 case 5: /* fscale */
4730 case 4: /* frndint */
4742 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4743 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4744 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4750 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4754 gen_op_fmov_FT0_STN(opreg
);
4755 gen_op_fp_arith_ST0_FT0
[op1
]();
4759 case 0x02: /* fcom */
4760 case 0x22: /* fcom2, undocumented op */
4761 gen_op_fmov_FT0_STN(opreg
);
4762 gen_op_fcom_ST0_FT0();
4764 case 0x03: /* fcomp */
4765 case 0x23: /* fcomp3, undocumented op */
4766 case 0x32: /* fcomp5, undocumented op */
4767 gen_op_fmov_FT0_STN(opreg
);
4768 gen_op_fcom_ST0_FT0();
4771 case 0x15: /* da/5 */
4773 case 1: /* fucompp */
4774 gen_op_fmov_FT0_STN(1);
4775 gen_op_fucom_ST0_FT0();
4785 case 0: /* feni (287 only, just do nop here) */
4787 case 1: /* fdisi (287 only, just do nop here) */
4792 case 3: /* fninit */
4795 case 4: /* fsetpm (287 only, just do nop here) */
4801 case 0x1d: /* fucomi */
4802 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4803 gen_op_set_cc_op(s
->cc_op
);
4804 gen_op_fmov_FT0_STN(opreg
);
4805 gen_op_fucomi_ST0_FT0();
4806 s
->cc_op
= CC_OP_EFLAGS
;
4808 case 0x1e: /* fcomi */
4809 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4810 gen_op_set_cc_op(s
->cc_op
);
4811 gen_op_fmov_FT0_STN(opreg
);
4812 gen_op_fcomi_ST0_FT0();
4813 s
->cc_op
= CC_OP_EFLAGS
;
4815 case 0x28: /* ffree sti */
4816 gen_op_ffree_STN(opreg
);
4818 case 0x2a: /* fst sti */
4819 gen_op_fmov_STN_ST0(opreg
);
4821 case 0x2b: /* fstp sti */
4822 case 0x0b: /* fstp1 sti, undocumented op */
4823 case 0x3a: /* fstp8 sti, undocumented op */
4824 case 0x3b: /* fstp9 sti, undocumented op */
4825 gen_op_fmov_STN_ST0(opreg
);
4828 case 0x2c: /* fucom st(i) */
4829 gen_op_fmov_FT0_STN(opreg
);
4830 gen_op_fucom_ST0_FT0();
4832 case 0x2d: /* fucomp st(i) */
4833 gen_op_fmov_FT0_STN(opreg
);
4834 gen_op_fucom_ST0_FT0();
4837 case 0x33: /* de/3 */
4839 case 1: /* fcompp */
4840 gen_op_fmov_FT0_STN(1);
4841 gen_op_fcom_ST0_FT0();
4849 case 0x38: /* ffreep sti, undocumented op */
4850 gen_op_ffree_STN(opreg
);
4853 case 0x3c: /* df/4 */
4856 gen_op_fnstsw_EAX();
4862 case 0x3d: /* fucomip */
4863 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4864 gen_op_set_cc_op(s
->cc_op
);
4865 gen_op_fmov_FT0_STN(opreg
);
4866 gen_op_fucomi_ST0_FT0();
4868 s
->cc_op
= CC_OP_EFLAGS
;
4870 case 0x3e: /* fcomip */
4871 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4872 gen_op_set_cc_op(s
->cc_op
);
4873 gen_op_fmov_FT0_STN(opreg
);
4874 gen_op_fcomi_ST0_FT0();
4876 s
->cc_op
= CC_OP_EFLAGS
;
4878 case 0x10 ... 0x13: /* fcmovxx */
4882 const static uint8_t fcmov_cc
[8] = {
4888 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4890 gen_op_fcmov_ST0_STN_T0(opreg
);
4898 /************************/
4901 case 0xa4: /* movsS */
4906 ot
= dflag
+ OT_WORD
;
4908 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4909 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4915 case 0xaa: /* stosS */
4920 ot
= dflag
+ OT_WORD
;
4922 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4923 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4928 case 0xac: /* lodsS */
4933 ot
= dflag
+ OT_WORD
;
4934 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4935 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4940 case 0xae: /* scasS */
4945 ot
= dflag
+ OT_WORD
;
4946 if (prefixes
& PREFIX_REPNZ
) {
4947 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4948 } else if (prefixes
& PREFIX_REPZ
) {
4949 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4952 s
->cc_op
= CC_OP_SUBB
+ ot
;
4956 case 0xa6: /* cmpsS */
4961 ot
= dflag
+ OT_WORD
;
4962 if (prefixes
& PREFIX_REPNZ
) {
4963 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4964 } else if (prefixes
& PREFIX_REPZ
) {
4965 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4968 s
->cc_op
= CC_OP_SUBB
+ ot
;
4971 case 0x6c: /* insS */
4976 ot
= dflag
? OT_LONG
: OT_WORD
;
4977 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4978 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4979 gen_op_andl_T0_ffff();
4980 if (gen_svm_check_io(s
, pc_start
,
4981 SVM_IOIO_TYPE_MASK
| (1 << (4+ot
)) |
4982 svm_is_rep(prefixes
) | 4 | (1 << (7+s
->aflag
))))
4984 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4985 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4990 case 0x6e: /* outsS */
4995 ot
= dflag
? OT_LONG
: OT_WORD
;
4996 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4997 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4998 gen_op_andl_T0_ffff();
4999 if (gen_svm_check_io(s
, pc_start
,
5000 (1 << (4+ot
)) | svm_is_rep(prefixes
) |
5001 4 | (1 << (7+s
->aflag
))))
5003 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
5004 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5010 /************************/
5018 ot
= dflag
? OT_LONG
: OT_WORD
;
5019 val
= ldub_code(s
->pc
++);
5020 gen_op_movl_T0_im(val
);
5021 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5022 if (gen_svm_check_io(s
, pc_start
,
5023 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) |
5027 gen_op_mov_reg_T1(ot
, R_EAX
);
5034 ot
= dflag
? OT_LONG
: OT_WORD
;
5035 val
= ldub_code(s
->pc
++);
5036 gen_op_movl_T0_im(val
);
5037 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5038 if (gen_svm_check_io(s
, pc_start
, svm_is_rep(prefixes
) |
5041 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5049 ot
= dflag
? OT_LONG
: OT_WORD
;
5050 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5051 gen_op_andl_T0_ffff();
5052 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5053 if (gen_svm_check_io(s
, pc_start
,
5054 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) |
5058 gen_op_mov_reg_T1(ot
, R_EAX
);
5065 ot
= dflag
? OT_LONG
: OT_WORD
;
5066 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5067 gen_op_andl_T0_ffff();
5068 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5069 if (gen_svm_check_io(s
, pc_start
,
5070 svm_is_rep(prefixes
) | (1 << (4+ot
))))
5072 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5076 /************************/
5078 case 0xc2: /* ret im */
5079 val
= ldsw_code(s
->pc
);
5082 if (CODE64(s
) && s
->dflag
)
5084 gen_stack_update(s
, val
+ (2 << s
->dflag
));
5086 gen_op_andl_T0_ffff();
5090 case 0xc3: /* ret */
5094 gen_op_andl_T0_ffff();
5098 case 0xca: /* lret im */
5099 val
= ldsw_code(s
->pc
);
5102 if (s
->pe
&& !s
->vm86
) {
5103 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5104 gen_op_set_cc_op(s
->cc_op
);
5105 gen_jmp_im(pc_start
- s
->cs_base
);
5106 gen_op_lret_protected(s
->dflag
, val
);
5110 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
5112 gen_op_andl_T0_ffff();
5113 /* NOTE: keeping EIP updated is not a problem in case of
5117 gen_op_addl_A0_im(2 << s
->dflag
);
5118 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
5119 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
5120 /* add stack offset */
5121 gen_stack_update(s
, val
+ (4 << s
->dflag
));
5125 case 0xcb: /* lret */
5128 case 0xcf: /* iret */
5129 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IRET
))
5133 gen_op_iret_real(s
->dflag
);
5134 s
->cc_op
= CC_OP_EFLAGS
;
5135 } else if (s
->vm86
) {
5137 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5139 gen_op_iret_real(s
->dflag
);
5140 s
->cc_op
= CC_OP_EFLAGS
;
5143 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5144 gen_op_set_cc_op(s
->cc_op
);
5145 gen_jmp_im(pc_start
- s
->cs_base
);
5146 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
5147 s
->cc_op
= CC_OP_EFLAGS
;
5151 case 0xe8: /* call im */
5154 tval
= (int32_t)insn_get(s
, OT_LONG
);
5156 tval
= (int16_t)insn_get(s
, OT_WORD
);
5157 next_eip
= s
->pc
- s
->cs_base
;
5161 gen_movtl_T0_im(next_eip
);
5166 case 0x9a: /* lcall im */
5168 unsigned int selector
, offset
;
5172 ot
= dflag
? OT_LONG
: OT_WORD
;
5173 offset
= insn_get(s
, ot
);
5174 selector
= insn_get(s
, OT_WORD
);
5176 gen_op_movl_T0_im(selector
);
5177 gen_op_movl_T1_imu(offset
);
5180 case 0xe9: /* jmp im */
5182 tval
= (int32_t)insn_get(s
, OT_LONG
);
5184 tval
= (int16_t)insn_get(s
, OT_WORD
);
5185 tval
+= s
->pc
- s
->cs_base
;
5190 case 0xea: /* ljmp im */
5192 unsigned int selector
, offset
;
5196 ot
= dflag
? OT_LONG
: OT_WORD
;
5197 offset
= insn_get(s
, ot
);
5198 selector
= insn_get(s
, OT_WORD
);
5200 gen_op_movl_T0_im(selector
);
5201 gen_op_movl_T1_imu(offset
);
5204 case 0xeb: /* jmp Jb */
5205 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5206 tval
+= s
->pc
- s
->cs_base
;
5211 case 0x70 ... 0x7f: /* jcc Jb */
5212 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5214 case 0x180 ... 0x18f: /* jcc Jv */
5216 tval
= (int32_t)insn_get(s
, OT_LONG
);
5218 tval
= (int16_t)insn_get(s
, OT_WORD
);
5221 next_eip
= s
->pc
- s
->cs_base
;
5225 gen_jcc(s
, b
, tval
, next_eip
);
5228 case 0x190 ... 0x19f: /* setcc Gv */
5229 modrm
= ldub_code(s
->pc
++);
5231 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
5233 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5234 ot
= dflag
+ OT_WORD
;
5235 modrm
= ldub_code(s
->pc
++);
5236 reg
= ((modrm
>> 3) & 7) | rex_r
;
5237 mod
= (modrm
>> 6) & 3;
5240 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5241 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
5243 rm
= (modrm
& 7) | REX_B(s
);
5244 gen_op_mov_TN_reg(ot
, 1, rm
);
5246 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
5249 /************************/
5251 case 0x9c: /* pushf */
5252 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PUSHF
))
5254 if (s
->vm86
&& s
->iopl
!= 3) {
5255 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5257 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5258 gen_op_set_cc_op(s
->cc_op
);
5259 gen_op_movl_T0_eflags();
5263 case 0x9d: /* popf */
5264 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_POPF
))
5266 if (s
->vm86
&& s
->iopl
!= 3) {
5267 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5272 gen_op_movl_eflags_T0_cpl0();
5274 gen_op_movw_eflags_T0_cpl0();
5277 if (s
->cpl
<= s
->iopl
) {
5279 gen_op_movl_eflags_T0_io();
5281 gen_op_movw_eflags_T0_io();
5285 gen_op_movl_eflags_T0();
5287 gen_op_movw_eflags_T0();
5292 s
->cc_op
= CC_OP_EFLAGS
;
5293 /* abort translation because TF flag may change */
5294 gen_jmp_im(s
->pc
- s
->cs_base
);
5298 case 0x9e: /* sahf */
5301 gen_op_mov_TN_reg(OT_BYTE
, 0, R_AH
);
5302 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5303 gen_op_set_cc_op(s
->cc_op
);
5304 gen_op_movb_eflags_T0();
5305 s
->cc_op
= CC_OP_EFLAGS
;
5307 case 0x9f: /* lahf */
5310 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5311 gen_op_set_cc_op(s
->cc_op
);
5312 gen_op_movl_T0_eflags();
5313 gen_op_mov_reg_T0(OT_BYTE
, R_AH
);
5315 case 0xf5: /* cmc */
5316 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5317 gen_op_set_cc_op(s
->cc_op
);
5319 s
->cc_op
= CC_OP_EFLAGS
;
5321 case 0xf8: /* clc */
5322 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5323 gen_op_set_cc_op(s
->cc_op
);
5325 s
->cc_op
= CC_OP_EFLAGS
;
5327 case 0xf9: /* stc */
5328 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5329 gen_op_set_cc_op(s
->cc_op
);
5331 s
->cc_op
= CC_OP_EFLAGS
;
5333 case 0xfc: /* cld */
5336 case 0xfd: /* std */
5340 /************************/
5341 /* bit operations */
5342 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5343 ot
= dflag
+ OT_WORD
;
5344 modrm
= ldub_code(s
->pc
++);
5345 op
= (modrm
>> 3) & 7;
5346 mod
= (modrm
>> 6) & 3;
5347 rm
= (modrm
& 7) | REX_B(s
);
5350 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5351 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
5353 gen_op_mov_TN_reg(ot
, 0, rm
);
5356 val
= ldub_code(s
->pc
++);
5357 gen_op_movl_T1_im(val
);
5361 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5362 s
->cc_op
= CC_OP_SARB
+ ot
;
5365 gen_op_st_T0_A0(ot
+ s
->mem_index
);
5367 gen_op_mov_reg_T0(ot
, rm
);
5368 gen_op_update_bt_cc();
5371 case 0x1a3: /* bt Gv, Ev */
5374 case 0x1ab: /* bts */
5377 case 0x1b3: /* btr */
5380 case 0x1bb: /* btc */
5383 ot
= dflag
+ OT_WORD
;
5384 modrm
= ldub_code(s
->pc
++);
5385 reg
= ((modrm
>> 3) & 7) | rex_r
;
5386 mod
= (modrm
>> 6) & 3;
5387 rm
= (modrm
& 7) | REX_B(s
);
5388 gen_op_mov_TN_reg(OT_LONG
, 1, reg
);
5390 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5391 /* specific case: we need to add a displacement */
5392 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5393 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
5395 gen_op_mov_TN_reg(ot
, 0, rm
);
5397 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5398 s
->cc_op
= CC_OP_SARB
+ ot
;
5401 gen_op_st_T0_A0(ot
+ s
->mem_index
);
5403 gen_op_mov_reg_T0(ot
, rm
);
5404 gen_op_update_bt_cc();
5407 case 0x1bc: /* bsf */
5408 case 0x1bd: /* bsr */
5409 ot
= dflag
+ OT_WORD
;
5410 modrm
= ldub_code(s
->pc
++);
5411 reg
= ((modrm
>> 3) & 7) | rex_r
;
5412 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5413 /* NOTE: in order to handle the 0 case, we must load the
5414 result. It could be optimized with a generated jump */
5415 gen_op_mov_TN_reg(ot
, 1, reg
);
5416 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5417 gen_op_mov_reg_T1(ot
, reg
);
5418 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5420 /************************/
5422 case 0x27: /* daa */
5425 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5426 gen_op_set_cc_op(s
->cc_op
);
5428 s
->cc_op
= CC_OP_EFLAGS
;
5430 case 0x2f: /* das */
5433 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5434 gen_op_set_cc_op(s
->cc_op
);
5436 s
->cc_op
= CC_OP_EFLAGS
;
5438 case 0x37: /* aaa */
5441 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5442 gen_op_set_cc_op(s
->cc_op
);
5444 s
->cc_op
= CC_OP_EFLAGS
;
5446 case 0x3f: /* aas */
5449 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5450 gen_op_set_cc_op(s
->cc_op
);
5452 s
->cc_op
= CC_OP_EFLAGS
;
5454 case 0xd4: /* aam */
5457 val
= ldub_code(s
->pc
++);
5459 gen_exception(s
, EXCP00_DIVZ
, pc_start
- s
->cs_base
);
5462 s
->cc_op
= CC_OP_LOGICB
;
5465 case 0xd5: /* aad */
5468 val
= ldub_code(s
->pc
++);
5470 s
->cc_op
= CC_OP_LOGICB
;
5472 /************************/
5474 case 0x90: /* nop */
5475 /* XXX: xchg + rex handling */
5476 /* XXX: correct lock test for all insn */
5477 if (prefixes
& PREFIX_LOCK
)
5479 if (prefixes
& PREFIX_REPZ
) {
5480 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PAUSE
);
5483 case 0x9b: /* fwait */
5484 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5485 (HF_MP_MASK
| HF_TS_MASK
)) {
5486 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5488 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5489 gen_op_set_cc_op(s
->cc_op
);
5490 gen_jmp_im(pc_start
- s
->cs_base
);
5494 case 0xcc: /* int3 */
5495 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5497 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5499 case 0xcd: /* int N */
5500 val
= ldub_code(s
->pc
++);
5501 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5503 if (s
->vm86
&& s
->iopl
!= 3) {
5504 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5506 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5509 case 0xce: /* into */
5512 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5514 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5515 gen_op_set_cc_op(s
->cc_op
);
5516 gen_jmp_im(pc_start
- s
->cs_base
);
5517 gen_op_into(s
->pc
- pc_start
);
5519 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5520 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_ICEBP
))
5523 gen_debug(s
, pc_start
- s
->cs_base
);
5526 tb_flush(cpu_single_env
);
5527 cpu_set_log(CPU_LOG_INT
| CPU_LOG_TB_IN_ASM
);
5530 case 0xfa: /* cli */
5532 if (s
->cpl
<= s
->iopl
) {
5535 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5541 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5545 case 0xfb: /* sti */
5547 if (s
->cpl
<= s
->iopl
) {
5550 /* interruptions are enabled only the first insn after sti */
5551 /* If several instructions disable interrupts, only the
5553 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5554 gen_op_set_inhibit_irq();
5555 /* give a chance to handle pending irqs */
5556 gen_jmp_im(s
->pc
- s
->cs_base
);
5559 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5565 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5569 case 0x62: /* bound */
5572 ot
= dflag
? OT_LONG
: OT_WORD
;
5573 modrm
= ldub_code(s
->pc
++);
5574 reg
= (modrm
>> 3) & 7;
5575 mod
= (modrm
>> 6) & 3;
5578 gen_op_mov_TN_reg(ot
, 0, reg
);
5579 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5580 gen_jmp_im(pc_start
- s
->cs_base
);
5586 case 0x1c8 ... 0x1cf: /* bswap reg */
5587 reg
= (b
& 7) | REX_B(s
);
5588 #ifdef TARGET_X86_64
5590 gen_op_mov_TN_reg(OT_QUAD
, 0, reg
);
5591 tcg_gen_bswap_i64(cpu_T
[0], cpu_T
[0]);
5592 gen_op_mov_reg_T0(OT_QUAD
, reg
);
5596 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
5598 tmp0
= tcg_temp_new(TCG_TYPE_I32
);
5599 tcg_gen_trunc_i64_i32(tmp0
, cpu_T
[0]);
5600 tcg_gen_bswap_i32(tmp0
, tmp0
);
5601 tcg_gen_extu_i32_i64(cpu_T
[0], tmp0
);
5602 gen_op_mov_reg_T0(OT_LONG
, reg
);
5606 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
5607 tcg_gen_bswap_i32(cpu_T
[0], cpu_T
[0]);
5608 gen_op_mov_reg_T0(OT_LONG
, reg
);
5612 case 0xd6: /* salc */
5615 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5616 gen_op_set_cc_op(s
->cc_op
);
5619 case 0xe0: /* loopnz */
5620 case 0xe1: /* loopz */
5621 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5622 gen_op_set_cc_op(s
->cc_op
);
5624 case 0xe2: /* loop */
5625 case 0xe3: /* jecxz */
5629 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5630 next_eip
= s
->pc
- s
->cs_base
;
5635 l1
= gen_new_label();
5636 l2
= gen_new_label();
5639 gen_op_jz_ecx
[s
->aflag
](l1
);
5641 gen_op_dec_ECX
[s
->aflag
]();
5644 gen_op_loop
[s
->aflag
][b
](l1
);
5647 gen_jmp_im(next_eip
);
5648 gen_op_jmp_label(l2
);
5655 case 0x130: /* wrmsr */
5656 case 0x132: /* rdmsr */
5658 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5662 retval
= gen_svm_check_intercept_param(s
, pc_start
, SVM_EXIT_MSR
, 0);
5665 retval
= gen_svm_check_intercept_param(s
, pc_start
, SVM_EXIT_MSR
, 1);
5672 case 0x131: /* rdtsc */
5673 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_RDTSC
))
5675 gen_jmp_im(pc_start
- s
->cs_base
);
5678 case 0x133: /* rdpmc */
5679 gen_jmp_im(pc_start
- s
->cs_base
);
5682 case 0x134: /* sysenter */
5686 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5688 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5689 gen_op_set_cc_op(s
->cc_op
);
5690 s
->cc_op
= CC_OP_DYNAMIC
;
5692 gen_jmp_im(pc_start
- s
->cs_base
);
5697 case 0x135: /* sysexit */
5701 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5703 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5704 gen_op_set_cc_op(s
->cc_op
);
5705 s
->cc_op
= CC_OP_DYNAMIC
;
5707 gen_jmp_im(pc_start
- s
->cs_base
);
5712 #ifdef TARGET_X86_64
5713 case 0x105: /* syscall */
5714 /* XXX: is it usable in real mode ? */
5715 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5716 gen_op_set_cc_op(s
->cc_op
);
5717 s
->cc_op
= CC_OP_DYNAMIC
;
5719 gen_jmp_im(pc_start
- s
->cs_base
);
5720 gen_op_syscall(s
->pc
- pc_start
);
5723 case 0x107: /* sysret */
5725 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5727 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5728 gen_op_set_cc_op(s
->cc_op
);
5729 s
->cc_op
= CC_OP_DYNAMIC
;
5731 gen_jmp_im(pc_start
- s
->cs_base
);
5732 gen_op_sysret(s
->dflag
);
5733 /* condition codes are modified only in long mode */
5735 s
->cc_op
= CC_OP_EFLAGS
;
5740 case 0x1a2: /* cpuid */
5741 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_CPUID
))
5745 case 0xf4: /* hlt */
5747 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5749 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_HLT
))
5751 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5752 gen_op_set_cc_op(s
->cc_op
);
5753 gen_jmp_im(s
->pc
- s
->cs_base
);
5759 modrm
= ldub_code(s
->pc
++);
5760 mod
= (modrm
>> 6) & 3;
5761 op
= (modrm
>> 3) & 7;
5764 if (!s
->pe
|| s
->vm86
)
5766 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_READ
))
5768 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5772 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5775 if (!s
->pe
|| s
->vm86
)
5778 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5780 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_WRITE
))
5782 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5783 gen_jmp_im(pc_start
- s
->cs_base
);
5788 if (!s
->pe
|| s
->vm86
)
5790 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_READ
))
5792 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5796 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5799 if (!s
->pe
|| s
->vm86
)
5802 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5804 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_WRITE
))
5806 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5807 gen_jmp_im(pc_start
- s
->cs_base
);
5813 if (!s
->pe
|| s
->vm86
)
5815 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5816 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5817 gen_op_set_cc_op(s
->cc_op
);
5822 s
->cc_op
= CC_OP_EFLAGS
;
5829 modrm
= ldub_code(s
->pc
++);
5830 mod
= (modrm
>> 6) & 3;
5831 op
= (modrm
>> 3) & 7;
5837 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_GDTR_READ
))
5839 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5840 gen_op_movl_T0_env(offsetof(CPUX86State
, gdt
.limit
));
5841 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5842 gen_add_A0_im(s
, 2);
5843 gen_op_movtl_T0_env(offsetof(CPUX86State
, gdt
.base
));
5845 gen_op_andl_T0_im(0xffffff);
5846 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5851 case 0: /* monitor */
5852 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5855 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_MONITOR
))
5857 gen_jmp_im(pc_start
- s
->cs_base
);
5858 #ifdef TARGET_X86_64
5859 if (s
->aflag
== 2) {
5860 gen_op_movq_A0_reg(R_EBX
);
5861 gen_op_addq_A0_AL();
5865 gen_op_movl_A0_reg(R_EBX
);
5866 gen_op_addl_A0_AL();
5868 gen_op_andl_A0_ffff();
5870 gen_add_A0_ds_seg(s
);
5874 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5877 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5878 gen_op_set_cc_op(s
->cc_op
);
5879 s
->cc_op
= CC_OP_DYNAMIC
;
5881 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_MWAIT
))
5883 gen_jmp_im(s
->pc
- s
->cs_base
);
5891 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IDTR_READ
))
5893 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5894 gen_op_movl_T0_env(offsetof(CPUX86State
, idt
.limit
));
5895 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5896 gen_add_A0_im(s
, 2);
5897 gen_op_movtl_T0_env(offsetof(CPUX86State
, idt
.base
));
5899 gen_op_andl_T0_im(0xffffff);
5900 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5908 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMRUN
))
5910 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5911 gen_op_set_cc_op(s
->cc_op
);
5912 gen_jmp_im(s
->pc
- s
->cs_base
);
5914 s
->cc_op
= CC_OP_EFLAGS
;
5917 case 1: /* VMMCALL */
5918 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMMCALL
))
5920 /* FIXME: cause #UD if hflags & SVM */
5923 case 2: /* VMLOAD */
5924 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMLOAD
))
5928 case 3: /* VMSAVE */
5929 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMSAVE
))
5934 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_STGI
))
5939 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_CLGI
))
5943 case 6: /* SKINIT */
5944 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SKINIT
))
5948 case 7: /* INVLPGA */
5949 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_INVLPGA
))
        } else if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start,
                op == 2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
                break;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            if (op == 2) {
                gen_op_movtl_env_T0(offsetof(CPUX86State, gdt.base));
                gen_op_movl_env_T1(offsetof(CPUX86State, gdt.limit));
            } else {
                gen_op_movtl_env_T0(offsetof(CPUX86State, idt.base));
                gen_op_movl_env_T1(offsetof(CPUX86State, idt.limit));
            }
        }
        break;
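        /* LGDT/LIDT mirror the SGDT/SIDT layout: the 16-bit limit is read
           into T1 and the base into T0 (masked to 24 bits for a 16-bit
           operand size); env is only updated once both loads have
           succeeded, so a faulting load leaves the descriptor-table
           registers untouched. */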
        case 4: /* smsw */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
                break;
            gen_op_movl_T0_env(offsetof(CPUX86State, cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
                    break;
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_op_lmsw_T0();
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
#ifdef TARGET_X86_64
                if (CODE64(s) && rm == 0) {
                    /* swapgs */
                    gen_op_movtl_T0_env(offsetof(CPUX86State, segs[R_GS].base));
                    gen_op_movtl_T1_env(offsetof(CPUX86State, kernelgsbase));
                    gen_op_movtl_env_T1(offsetof(CPUX86State, segs[R_GS].base));
                    gen_op_movtl_env_T0(offsetof(CPUX86State, kernelgsbase));
                } else
#endif
                {
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                        break;
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_invlpg_A0();
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
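        /* SWAPGS has no memory operand: the four movtl ops above read
           GS.base and kernelgsbase into T0/T1 and write them back swapped,
           entirely within the translated block. */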
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (gen_svm_check_intercept(s, pc_start,
                (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
                break;
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    gen_op_movslq_T0_T0();
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_TN_reg(ot, 0, rm);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS;
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_arpl_update();
        }
        break;
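    /* ARPL adjusts the RPL field of the selector in T0 and reports whether
       it changed in ZF, which is why cc_op switches to CC_OP_EFLAGS before
       the result is written back to memory or register. */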
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (b & 2) {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
                    gen_op_mov_TN_reg(ot, 0, rm);
                    gen_op_movl_crN_T0(reg);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
                    if (reg == 8)
                        gen_op_movtl_T0_cr8();
                    else
#endif
                        gen_op_movtl_T0_env(offsetof(CPUX86State, cr[reg]));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                gen_op_movl_drN_T0(reg);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State, dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_op_clts();
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
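    /* CR0.TS is folded into the static translation flags (hflags), so
       after CLTS the current block is ended and the following code is
       retranslated with the new flags. */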
        /* MMX/SSE/SSE2/PNI support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxsave_A0((s->dflag == 2));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxrstor_A0((s->dflag == 2));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
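        /* The fences emit no ops: code within a translated block runs in
           order anyway, so only the encoding and CPUID checks matter; for
           CLFLUSH just the effective address is computed. */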
    case 0x10d: /* prefetch */
        modrm = ldub_code(s->pc++);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        gen_op_rsm();
        gen_eob(s);
        break;
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
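/* Reading the table: an entry such as [INDEX_op_adcb_T0_T1_cc] = CC_C
   records that ADC consumes the incoming carry, so optimize_flags() below
   must keep whatever earlier op produces that carry in its flag-computing
   form. */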
/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
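/* Note the asymmetry with opc_read_flags: rol/ror only define O and C,
   so a later ZF test still depends on whichever earlier op computed Z,
   and that op keeps its flag-computing form across the rotate. */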
/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
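/* Ops with no entry here default to themselves (see optimize_flags_init
   below), i.e. they are never rewritten. */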
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);
}
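/* When the guest word size exceeds the host's, T0/T1/A0 cannot live in
   host registers, so they are declared as memory slots (t0/t1/t2) inside
   the CPU state instead of being pinned to TCG_AREG1..3. */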
/* CPU flags computation optimization: we move backward thru the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}
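/* Worked example of the backward scan, using op names from the tables
   above (for illustration only): in the sequence
       shll_T0_T1_cc ; update1_cc ; movl_T0_eflags
   live_flags starts as CC_OSZAPC, so movl_T0_eflags (reads OSZAPC) and
   update1_cc (its written flags are live) are kept. After update1_cc,
   live_flags is empty: no flag written by the shift is used, so it is
   rewritten to the flag-free shll_T0_T1. */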
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
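    /* mem_index selects among the _raw/_kernel/_user memory accessors and
       is pre-scaled by 4 (the number of OT_* operand sizes) so it can be
       added directly to an OT_* value, as in
       gen_op_st_T0_A0(OT_WORD + s->mem_index) above. */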
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif
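    /* jmp_opt enables direct block chaining; it must be off whenever every
       instruction has to end its block (trap flag, single-stepping,
       inhibited IRQs). */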
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
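    /* Note: the TARGET_PAGE_SIZE - 32 bound above keeps a block from
       running far past one guest page, so page-granular TB invalidation
       (e.g. on self-modifying code) stays effective; gen_opc_end guards
       against overflowing the op buffer. */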
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif
    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
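/* The search_pc variant regenerates the ops while recording, in
   gen_opc_pc/gen_opc_cc_op/gen_opc_instr_start, which guest instruction
   each op belongs to; it is used when the guest PC and cc_op must be
   recovered after a fault inside generated code. */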