]> git.proxmox.com Git - mirror_qemu.git/blame - target-i386/translate.c
target-i386: Break CPUID feature definition lines
[mirror_qemu.git] / target-i386 / translate.c
CommitLineData
2c0262af
FB
1/*
2 * i386 translation
5fafdf24 3 *
2c0262af
FB
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
8167ee88 17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
2c0262af
FB
18 */
19#include <stdarg.h>
20#include <stdlib.h>
21#include <stdio.h>
22#include <string.h>
23#include <inttypes.h>
24#include <signal.h>
2c0262af 25
bec93d72 26#include "qemu/host-utils.h"
2c0262af 27#include "cpu.h"
76cad711 28#include "disas/disas.h"
57fec1fe 29#include "tcg-op.h"
2c0262af 30
a7812ae4
PB
31#include "helper.h"
32#define GEN_HELPER 1
33#include "helper.h"
34
2c0262af
FB
35#define PREFIX_REPZ 0x01
36#define PREFIX_REPNZ 0x02
37#define PREFIX_LOCK 0x04
38#define PREFIX_DATA 0x08
39#define PREFIX_ADR 0x10
701ed211 40#define PREFIX_VEX 0x20
2c0262af 41
14ce26e7 42#ifdef TARGET_X86_64
14ce26e7
FB
43#define CODE64(s) ((s)->code64)
44#define REX_X(s) ((s)->rex_x)
45#define REX_B(s) ((s)->rex_b)
14ce26e7 46#else
14ce26e7
FB
47#define CODE64(s) 0
48#define REX_X(s) 0
49#define REX_B(s) 0
50#endif
51
bec93d72
RH
52#ifdef TARGET_X86_64
53# define ctztl ctz64
54# define clztl clz64
55#else
56# define ctztl ctz32
57# define clztl clz32
58#endif
59
57fec1fe
FB
60//#define MACRO_TEST 1
61
57fec1fe 62/* global register indexes */
a7812ae4 63static TCGv_ptr cpu_env;
a3251186 64static TCGv cpu_A0;
988c3eb0 65static TCGv cpu_cc_dst, cpu_cc_src, cpu_cc_src2, cpu_cc_srcT;
a7812ae4 66static TCGv_i32 cpu_cc_op;
cc739bb0 67static TCGv cpu_regs[CPU_NB_REGS];
1e4840bf 68/* local temps */
3b9d3cf1 69static TCGv cpu_T[2];
57fec1fe 70/* local register indexes (only used inside old micro ops) */
a7812ae4
PB
71static TCGv cpu_tmp0, cpu_tmp4;
72static TCGv_ptr cpu_ptr0, cpu_ptr1;
73static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
74static TCGv_i64 cpu_tmp1_i64;
57fec1fe 75
1a7ff922
PB
76static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
77
022c62cb 78#include "exec/gen-icount.h"
2e70f6ef 79
57fec1fe
FB
80#ifdef TARGET_X86_64
81static int x86_64_hregs;
ae063a68
FB
82#endif
83
2c0262af
FB
84typedef struct DisasContext {
85 /* current insn context */
86 int override; /* -1 if no override */
87 int prefix;
88 int aflag, dflag;
14ce26e7 89 target_ulong pc; /* pc = eip + cs_base */
2c0262af
FB
90 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
91 static state change (stop translation) */
92 /* current block context */
14ce26e7 93 target_ulong cs_base; /* base of CS segment */
2c0262af
FB
94 int pe; /* protected mode */
95 int code32; /* 32 bit code segment */
14ce26e7
FB
96#ifdef TARGET_X86_64
97 int lma; /* long mode active */
98 int code64; /* 64 bit code segment */
99 int rex_x, rex_b;
100#endif
701ed211
RH
101 int vex_l; /* vex vector length */
102 int vex_v; /* vex vvvv register, without 1's compliment. */
2c0262af 103 int ss32; /* 32 bit stack segment */
fee71888 104 CCOp cc_op; /* current CC operation */
e207582f 105 bool cc_op_dirty;
2c0262af
FB
106 int addseg; /* non zero if either DS/ES/SS have a non zero base */
107 int f_st; /* currently unused */
108 int vm86; /* vm86 mode */
109 int cpl;
110 int iopl;
111 int tf; /* TF cpu flag */
34865134 112 int singlestep_enabled; /* "hardware" single step enabled */
2c0262af
FB
113 int jmp_opt; /* use direct block chaining for direct jumps */
114 int mem_index; /* select memory access functions */
c068688b 115 uint64_t flags; /* all execution flags */
2c0262af
FB
116 struct TranslationBlock *tb;
117 int popl_esp_hack; /* for correct popl with esp base handling */
14ce26e7
FB
118 int rip_offset; /* only used in x86_64, but left for simplicity */
119 int cpuid_features;
3d7374c5 120 int cpuid_ext_features;
e771edab 121 int cpuid_ext2_features;
12e26b75 122 int cpuid_ext3_features;
a9321a4d 123 int cpuid_7_0_ebx_features;
2c0262af
FB
124} DisasContext;
125
126static void gen_eob(DisasContext *s);
14ce26e7
FB
127static void gen_jmp(DisasContext *s, target_ulong eip);
128static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
63633fe6 129static void gen_op(DisasContext *s1, int op, int ot, int d);
2c0262af
FB
130
131/* i386 arith/logic operations */
132enum {
5fafdf24
TS
133 OP_ADDL,
134 OP_ORL,
135 OP_ADCL,
2c0262af 136 OP_SBBL,
5fafdf24
TS
137 OP_ANDL,
138 OP_SUBL,
139 OP_XORL,
2c0262af
FB
140 OP_CMPL,
141};
142
143/* i386 shift ops */
144enum {
5fafdf24
TS
145 OP_ROL,
146 OP_ROR,
147 OP_RCL,
148 OP_RCR,
149 OP_SHL,
150 OP_SHR,
2c0262af
FB
151 OP_SHL1, /* undocumented */
152 OP_SAR = 7,
153};
154
8e1c85e3
FB
155enum {
156 JCC_O,
157 JCC_B,
158 JCC_Z,
159 JCC_BE,
160 JCC_S,
161 JCC_P,
162 JCC_L,
163 JCC_LE,
164};
165
2c0262af
FB
166/* operand size */
167enum {
168 OT_BYTE = 0,
169 OT_WORD,
5fafdf24 170 OT_LONG,
2c0262af
FB
171 OT_QUAD,
172};
173
174enum {
175 /* I386 int registers */
176 OR_EAX, /* MUST be even numbered */
177 OR_ECX,
178 OR_EDX,
179 OR_EBX,
180 OR_ESP,
181 OR_EBP,
182 OR_ESI,
183 OR_EDI,
14ce26e7
FB
184
185 OR_TMP0 = 16, /* temporary operand register */
2c0262af
FB
186 OR_TMP1,
187 OR_A0, /* temporary register used when doing address evaluation */
2c0262af
FB
188};
189
b666265b 190enum {
a3251186
RH
191 USES_CC_DST = 1,
192 USES_CC_SRC = 2,
988c3eb0
RH
193 USES_CC_SRC2 = 4,
194 USES_CC_SRCT = 8,
b666265b
RH
195};
196
197/* Bit set if the global variable is live after setting CC_OP to X. */
198static const uint8_t cc_op_live[CC_OP_NB] = {
988c3eb0 199 [CC_OP_DYNAMIC] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
b666265b
RH
200 [CC_OP_EFLAGS] = USES_CC_SRC,
201 [CC_OP_MULB ... CC_OP_MULQ] = USES_CC_DST | USES_CC_SRC,
202 [CC_OP_ADDB ... CC_OP_ADDQ] = USES_CC_DST | USES_CC_SRC,
988c3eb0 203 [CC_OP_ADCB ... CC_OP_ADCQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
a3251186 204 [CC_OP_SUBB ... CC_OP_SUBQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRCT,
988c3eb0 205 [CC_OP_SBBB ... CC_OP_SBBQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
b666265b
RH
206 [CC_OP_LOGICB ... CC_OP_LOGICQ] = USES_CC_DST,
207 [CC_OP_INCB ... CC_OP_INCQ] = USES_CC_DST | USES_CC_SRC,
208 [CC_OP_DECB ... CC_OP_DECQ] = USES_CC_DST | USES_CC_SRC,
209 [CC_OP_SHLB ... CC_OP_SHLQ] = USES_CC_DST | USES_CC_SRC,
210 [CC_OP_SARB ... CC_OP_SARQ] = USES_CC_DST | USES_CC_SRC,
bc4b43dc 211 [CC_OP_BMILGB ... CC_OP_BMILGQ] = USES_CC_DST | USES_CC_SRC,
cd7f97ca
RH
212 [CC_OP_ADCX] = USES_CC_DST | USES_CC_SRC,
213 [CC_OP_ADOX] = USES_CC_SRC | USES_CC_SRC2,
214 [CC_OP_ADCOX] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
436ff2d2 215 [CC_OP_CLR] = 0,
b666265b
RH
216};
217
e207582f 218static void set_cc_op(DisasContext *s, CCOp op)
3ca51d07 219{
b666265b
RH
220 int dead;
221
222 if (s->cc_op == op) {
223 return;
224 }
225
226 /* Discard CC computation that will no longer be used. */
227 dead = cc_op_live[s->cc_op] & ~cc_op_live[op];
228 if (dead & USES_CC_DST) {
229 tcg_gen_discard_tl(cpu_cc_dst);
e207582f 230 }
b666265b
RH
231 if (dead & USES_CC_SRC) {
232 tcg_gen_discard_tl(cpu_cc_src);
233 }
988c3eb0
RH
234 if (dead & USES_CC_SRC2) {
235 tcg_gen_discard_tl(cpu_cc_src2);
236 }
a3251186
RH
237 if (dead & USES_CC_SRCT) {
238 tcg_gen_discard_tl(cpu_cc_srcT);
239 }
b666265b 240
e2f515cf
RH
241 if (op == CC_OP_DYNAMIC) {
242 /* The DYNAMIC setting is translator only, and should never be
243 stored. Thus we always consider it clean. */
244 s->cc_op_dirty = false;
245 } else {
246 /* Discard any computed CC_OP value (see shifts). */
247 if (s->cc_op == CC_OP_DYNAMIC) {
248 tcg_gen_discard_i32(cpu_cc_op);
249 }
250 s->cc_op_dirty = true;
251 }
b666265b 252 s->cc_op = op;
e207582f
RH
253}
254
e207582f
RH
255static void gen_update_cc_op(DisasContext *s)
256{
257 if (s->cc_op_dirty) {
773cdfcc 258 tcg_gen_movi_i32(cpu_cc_op, s->cc_op);
e207582f
RH
259 s->cc_op_dirty = false;
260 }
3ca51d07
RH
261}
262
57fec1fe
FB
263static inline void gen_op_movl_T0_0(void)
264{
265 tcg_gen_movi_tl(cpu_T[0], 0);
266}
267
268static inline void gen_op_movl_T0_im(int32_t val)
269{
270 tcg_gen_movi_tl(cpu_T[0], val);
271}
272
273static inline void gen_op_movl_T0_imu(uint32_t val)
274{
275 tcg_gen_movi_tl(cpu_T[0], val);
276}
277
278static inline void gen_op_movl_T1_im(int32_t val)
279{
280 tcg_gen_movi_tl(cpu_T[1], val);
281}
282
283static inline void gen_op_movl_T1_imu(uint32_t val)
284{
285 tcg_gen_movi_tl(cpu_T[1], val);
286}
287
288static inline void gen_op_movl_A0_im(uint32_t val)
289{
290 tcg_gen_movi_tl(cpu_A0, val);
291}
292
293#ifdef TARGET_X86_64
294static inline void gen_op_movq_A0_im(int64_t val)
295{
296 tcg_gen_movi_tl(cpu_A0, val);
297}
298#endif
299
300static inline void gen_movtl_T0_im(target_ulong val)
301{
302 tcg_gen_movi_tl(cpu_T[0], val);
303}
304
305static inline void gen_movtl_T1_im(target_ulong val)
306{
307 tcg_gen_movi_tl(cpu_T[1], val);
308}
309
310static inline void gen_op_andl_T0_ffff(void)
311{
312 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
313}
314
315static inline void gen_op_andl_T0_im(uint32_t val)
316{
317 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
318}
319
320static inline void gen_op_movl_T0_T1(void)
321{
322 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
323}
324
325static inline void gen_op_andl_A0_ffff(void)
326{
327 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
328}
329
14ce26e7
FB
330#ifdef TARGET_X86_64
331
332#define NB_OP_SIZES 4
333
14ce26e7
FB
334#else /* !TARGET_X86_64 */
335
336#define NB_OP_SIZES 3
337
14ce26e7
FB
338#endif /* !TARGET_X86_64 */
339
e2542fe2 340#if defined(HOST_WORDS_BIGENDIAN)
57fec1fe
FB
341#define REG_B_OFFSET (sizeof(target_ulong) - 1)
342#define REG_H_OFFSET (sizeof(target_ulong) - 2)
343#define REG_W_OFFSET (sizeof(target_ulong) - 2)
344#define REG_L_OFFSET (sizeof(target_ulong) - 4)
345#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
14ce26e7 346#else
57fec1fe
FB
347#define REG_B_OFFSET 0
348#define REG_H_OFFSET 1
349#define REG_W_OFFSET 0
350#define REG_L_OFFSET 0
351#define REG_LH_OFFSET 4
14ce26e7 352#endif
57fec1fe 353
96d7073f
PM
354/* In instruction encodings for byte register accesses the
355 * register number usually indicates "low 8 bits of register N";
356 * however there are some special cases where N 4..7 indicates
357 * [AH, CH, DH, BH], ie "bits 15..8 of register N-4". Return
358 * true for this special case, false otherwise.
359 */
360static inline bool byte_reg_is_xH(int reg)
361{
362 if (reg < 4) {
363 return false;
364 }
365#ifdef TARGET_X86_64
366 if (reg >= 8 || x86_64_hregs) {
367 return false;
368 }
369#endif
370 return true;
371}
372
1e4840bf 373static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
57fec1fe
FB
374{
375 switch(ot) {
376 case OT_BYTE:
96d7073f 377 if (!byte_reg_is_xH(reg)) {
c832e3de 378 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 8);
57fec1fe 379 } else {
c832e3de 380 tcg_gen_deposit_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], t0, 8, 8);
57fec1fe
FB
381 }
382 break;
383 case OT_WORD:
c832e3de 384 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 16);
57fec1fe 385 break;
cc739bb0 386 default: /* XXX this shouldn't be reached; abort? */
57fec1fe 387 case OT_LONG:
cc739bb0
LD
388 /* For x86_64, this sets the higher half of register to zero.
389 For i386, this is equivalent to a mov. */
390 tcg_gen_ext32u_tl(cpu_regs[reg], t0);
57fec1fe 391 break;
cc739bb0 392#ifdef TARGET_X86_64
57fec1fe 393 case OT_QUAD:
cc739bb0 394 tcg_gen_mov_tl(cpu_regs[reg], t0);
57fec1fe 395 break;
14ce26e7 396#endif
57fec1fe
FB
397 }
398}
2c0262af 399
57fec1fe
FB
400static inline void gen_op_mov_reg_T0(int ot, int reg)
401{
1e4840bf 402 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
57fec1fe
FB
403}
404
405static inline void gen_op_mov_reg_T1(int ot, int reg)
406{
1e4840bf 407 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
57fec1fe
FB
408}
409
410static inline void gen_op_mov_reg_A0(int size, int reg)
411{
412 switch(size) {
93ab25d7 413 case OT_BYTE:
c832e3de 414 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_A0, 0, 16);
57fec1fe 415 break;
cc739bb0 416 default: /* XXX this shouldn't be reached; abort? */
93ab25d7 417 case OT_WORD:
cc739bb0
LD
418 /* For x86_64, this sets the higher half of register to zero.
419 For i386, this is equivalent to a mov. */
420 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
57fec1fe 421 break;
cc739bb0 422#ifdef TARGET_X86_64
93ab25d7 423 case OT_LONG:
cc739bb0 424 tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
57fec1fe 425 break;
14ce26e7 426#endif
57fec1fe
FB
427 }
428}
429
1e4840bf 430static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
57fec1fe 431{
96d7073f
PM
432 if (ot == OT_BYTE && byte_reg_is_xH(reg)) {
433 tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
434 tcg_gen_ext8u_tl(t0, t0);
435 } else {
cc739bb0 436 tcg_gen_mov_tl(t0, cpu_regs[reg]);
57fec1fe
FB
437 }
438}
439
1e4840bf
FB
440static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
441{
442 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
443}
444
57fec1fe
FB
445static inline void gen_op_movl_A0_reg(int reg)
446{
cc739bb0 447 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
57fec1fe
FB
448}
449
450static inline void gen_op_addl_A0_im(int32_t val)
451{
452 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
14ce26e7 453#ifdef TARGET_X86_64
57fec1fe 454 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
14ce26e7 455#endif
57fec1fe 456}
2c0262af 457
14ce26e7 458#ifdef TARGET_X86_64
57fec1fe
FB
459static inline void gen_op_addq_A0_im(int64_t val)
460{
461 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
462}
14ce26e7 463#endif
57fec1fe
FB
464
465static void gen_add_A0_im(DisasContext *s, int val)
466{
467#ifdef TARGET_X86_64
468 if (CODE64(s))
469 gen_op_addq_A0_im(val);
470 else
471#endif
472 gen_op_addl_A0_im(val);
473}
2c0262af 474
57fec1fe 475static inline void gen_op_addl_T0_T1(void)
2c0262af 476{
57fec1fe
FB
477 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
478}
479
480static inline void gen_op_jmp_T0(void)
481{
317ac620 482 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, eip));
57fec1fe
FB
483}
484
6e0d8677 485static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
57fec1fe 486{
6e0d8677 487 switch(size) {
93ab25d7 488 case OT_BYTE:
cc739bb0 489 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
c832e3de 490 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
6e0d8677 491 break;
93ab25d7 492 case OT_WORD:
cc739bb0
LD
493 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
494 /* For x86_64, this sets the higher half of register to zero.
495 For i386, this is equivalent to a nop. */
496 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
497 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
6e0d8677
FB
498 break;
499#ifdef TARGET_X86_64
93ab25d7 500 case OT_LONG:
cc739bb0 501 tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
6e0d8677
FB
502 break;
503#endif
504 }
57fec1fe
FB
505}
506
6e0d8677 507static inline void gen_op_add_reg_T0(int size, int reg)
57fec1fe 508{
6e0d8677 509 switch(size) {
93ab25d7 510 case OT_BYTE:
cc739bb0 511 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
c832e3de 512 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
6e0d8677 513 break;
93ab25d7 514 case OT_WORD:
cc739bb0
LD
515 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
516 /* For x86_64, this sets the higher half of register to zero.
517 For i386, this is equivalent to a nop. */
518 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
519 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
6e0d8677 520 break;
14ce26e7 521#ifdef TARGET_X86_64
93ab25d7 522 case OT_LONG:
cc739bb0 523 tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
6e0d8677 524 break;
14ce26e7 525#endif
6e0d8677
FB
526 }
527}
57fec1fe 528
57fec1fe
FB
529static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
530{
cc739bb0
LD
531 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
532 if (shift != 0)
57fec1fe
FB
533 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
534 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
cc739bb0
LD
535 /* For x86_64, this sets the higher half of register to zero.
536 For i386, this is equivalent to a nop. */
537 tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
57fec1fe 538}
2c0262af 539
57fec1fe
FB
540static inline void gen_op_movl_A0_seg(int reg)
541{
317ac620 542 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base) + REG_L_OFFSET);
57fec1fe 543}
2c0262af 544
7162ab21 545static inline void gen_op_addl_A0_seg(DisasContext *s, int reg)
57fec1fe 546{
317ac620 547 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
57fec1fe 548#ifdef TARGET_X86_64
7162ab21
VC
549 if (CODE64(s)) {
550 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
551 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
552 } else {
553 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
554 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
555 }
556#else
557 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
57fec1fe
FB
558#endif
559}
2c0262af 560
14ce26e7 561#ifdef TARGET_X86_64
57fec1fe
FB
562static inline void gen_op_movq_A0_seg(int reg)
563{
317ac620 564 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base));
57fec1fe 565}
14ce26e7 566
57fec1fe
FB
567static inline void gen_op_addq_A0_seg(int reg)
568{
317ac620 569 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
57fec1fe
FB
570 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
571}
572
573static inline void gen_op_movq_A0_reg(int reg)
574{
cc739bb0 575 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
57fec1fe
FB
576}
577
578static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
579{
cc739bb0
LD
580 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
581 if (shift != 0)
57fec1fe
FB
582 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
583 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
584}
14ce26e7
FB
585#endif
586
57fec1fe
FB
587static inline void gen_op_lds_T0_A0(int idx)
588{
589 int mem_index = (idx >> 2) - 1;
590 switch(idx & 3) {
93ab25d7 591 case OT_BYTE:
57fec1fe
FB
592 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
593 break;
93ab25d7 594 case OT_WORD:
57fec1fe
FB
595 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
596 break;
597 default:
93ab25d7 598 case OT_LONG:
57fec1fe
FB
599 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
600 break;
601 }
602}
2c0262af 603
1e4840bf 604static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
57fec1fe
FB
605{
606 int mem_index = (idx >> 2) - 1;
607 switch(idx & 3) {
93ab25d7 608 case OT_BYTE:
1e4840bf 609 tcg_gen_qemu_ld8u(t0, a0, mem_index);
57fec1fe 610 break;
93ab25d7 611 case OT_WORD:
1e4840bf 612 tcg_gen_qemu_ld16u(t0, a0, mem_index);
57fec1fe 613 break;
93ab25d7 614 case OT_LONG:
1e4840bf 615 tcg_gen_qemu_ld32u(t0, a0, mem_index);
57fec1fe
FB
616 break;
617 default:
93ab25d7 618 case OT_QUAD:
a7812ae4
PB
619 /* Should never happen on 32-bit targets. */
620#ifdef TARGET_X86_64
1e4840bf 621 tcg_gen_qemu_ld64(t0, a0, mem_index);
a7812ae4 622#endif
57fec1fe
FB
623 break;
624 }
625}
2c0262af 626
1e4840bf
FB
627/* XXX: always use ldu or lds */
628static inline void gen_op_ld_T0_A0(int idx)
629{
630 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
631}
632
57fec1fe
FB
633static inline void gen_op_ldu_T0_A0(int idx)
634{
1e4840bf 635 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
57fec1fe 636}
2c0262af 637
57fec1fe 638static inline void gen_op_ld_T1_A0(int idx)
1e4840bf
FB
639{
640 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
641}
642
643static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
57fec1fe
FB
644{
645 int mem_index = (idx >> 2) - 1;
646 switch(idx & 3) {
93ab25d7 647 case OT_BYTE:
1e4840bf 648 tcg_gen_qemu_st8(t0, a0, mem_index);
57fec1fe 649 break;
93ab25d7 650 case OT_WORD:
1e4840bf 651 tcg_gen_qemu_st16(t0, a0, mem_index);
57fec1fe 652 break;
93ab25d7 653 case OT_LONG:
1e4840bf 654 tcg_gen_qemu_st32(t0, a0, mem_index);
57fec1fe
FB
655 break;
656 default:
93ab25d7 657 case OT_QUAD:
a7812ae4
PB
658 /* Should never happen on 32-bit targets. */
659#ifdef TARGET_X86_64
1e4840bf 660 tcg_gen_qemu_st64(t0, a0, mem_index);
a7812ae4 661#endif
57fec1fe
FB
662 break;
663 }
664}
4f31916f 665
57fec1fe
FB
666static inline void gen_op_st_T0_A0(int idx)
667{
1e4840bf 668 gen_op_st_v(idx, cpu_T[0], cpu_A0);
57fec1fe 669}
4f31916f 670
57fec1fe
FB
671static inline void gen_op_st_T1_A0(int idx)
672{
1e4840bf 673 gen_op_st_v(idx, cpu_T[1], cpu_A0);
57fec1fe 674}
4f31916f 675
14ce26e7
FB
676static inline void gen_jmp_im(target_ulong pc)
677{
57fec1fe 678 tcg_gen_movi_tl(cpu_tmp0, pc);
317ac620 679 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, eip));
14ce26e7
FB
680}
681
2c0262af
FB
682static inline void gen_string_movl_A0_ESI(DisasContext *s)
683{
684 int override;
685
686 override = s->override;
14ce26e7
FB
687#ifdef TARGET_X86_64
688 if (s->aflag == 2) {
689 if (override >= 0) {
57fec1fe
FB
690 gen_op_movq_A0_seg(override);
691 gen_op_addq_A0_reg_sN(0, R_ESI);
14ce26e7 692 } else {
57fec1fe 693 gen_op_movq_A0_reg(R_ESI);
14ce26e7
FB
694 }
695 } else
696#endif
2c0262af
FB
697 if (s->aflag) {
698 /* 32 bit address */
699 if (s->addseg && override < 0)
700 override = R_DS;
701 if (override >= 0) {
57fec1fe
FB
702 gen_op_movl_A0_seg(override);
703 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af 704 } else {
57fec1fe 705 gen_op_movl_A0_reg(R_ESI);
2c0262af
FB
706 }
707 } else {
708 /* 16 address, always override */
709 if (override < 0)
710 override = R_DS;
57fec1fe 711 gen_op_movl_A0_reg(R_ESI);
2c0262af 712 gen_op_andl_A0_ffff();
7162ab21 713 gen_op_addl_A0_seg(s, override);
2c0262af
FB
714 }
715}
716
717static inline void gen_string_movl_A0_EDI(DisasContext *s)
718{
14ce26e7
FB
719#ifdef TARGET_X86_64
720 if (s->aflag == 2) {
57fec1fe 721 gen_op_movq_A0_reg(R_EDI);
14ce26e7
FB
722 } else
723#endif
2c0262af
FB
724 if (s->aflag) {
725 if (s->addseg) {
57fec1fe
FB
726 gen_op_movl_A0_seg(R_ES);
727 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af 728 } else {
57fec1fe 729 gen_op_movl_A0_reg(R_EDI);
2c0262af
FB
730 }
731 } else {
57fec1fe 732 gen_op_movl_A0_reg(R_EDI);
2c0262af 733 gen_op_andl_A0_ffff();
7162ab21 734 gen_op_addl_A0_seg(s, R_ES);
2c0262af
FB
735 }
736}
737
6e0d8677
FB
738static inline void gen_op_movl_T0_Dshift(int ot)
739{
317ac620 740 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, df));
6e0d8677 741 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
2c0262af
FB
742};
743
d824df34 744static TCGv gen_ext_tl(TCGv dst, TCGv src, int size, bool sign)
6e0d8677 745{
d824df34 746 switch (size) {
6e0d8677 747 case OT_BYTE:
d824df34
PB
748 if (sign) {
749 tcg_gen_ext8s_tl(dst, src);
750 } else {
751 tcg_gen_ext8u_tl(dst, src);
752 }
753 return dst;
6e0d8677 754 case OT_WORD:
d824df34
PB
755 if (sign) {
756 tcg_gen_ext16s_tl(dst, src);
757 } else {
758 tcg_gen_ext16u_tl(dst, src);
759 }
760 return dst;
761#ifdef TARGET_X86_64
6e0d8677 762 case OT_LONG:
d824df34
PB
763 if (sign) {
764 tcg_gen_ext32s_tl(dst, src);
765 } else {
766 tcg_gen_ext32u_tl(dst, src);
767 }
768 return dst;
769#endif
6e0d8677 770 default:
d824df34 771 return src;
6e0d8677
FB
772 }
773}
3b46e624 774
d824df34
PB
775static void gen_extu(int ot, TCGv reg)
776{
777 gen_ext_tl(reg, reg, ot, false);
778}
779
6e0d8677
FB
780static void gen_exts(int ot, TCGv reg)
781{
d824df34 782 gen_ext_tl(reg, reg, ot, true);
6e0d8677 783}
2c0262af 784
6e0d8677
FB
785static inline void gen_op_jnz_ecx(int size, int label1)
786{
cc739bb0 787 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
6e0d8677 788 gen_extu(size + 1, cpu_tmp0);
cb63669a 789 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
6e0d8677
FB
790}
791
792static inline void gen_op_jz_ecx(int size, int label1)
793{
cc739bb0 794 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
6e0d8677 795 gen_extu(size + 1, cpu_tmp0);
cb63669a 796 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
6e0d8677 797}
2c0262af 798
a7812ae4
PB
799static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
800{
801 switch (ot) {
93ab25d7
PB
802 case OT_BYTE:
803 gen_helper_inb(v, n);
804 break;
805 case OT_WORD:
806 gen_helper_inw(v, n);
807 break;
808 case OT_LONG:
809 gen_helper_inl(v, n);
810 break;
a7812ae4 811 }
a7812ae4 812}
2c0262af 813
a7812ae4
PB
814static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
815{
816 switch (ot) {
93ab25d7
PB
817 case OT_BYTE:
818 gen_helper_outb(v, n);
819 break;
820 case OT_WORD:
821 gen_helper_outw(v, n);
822 break;
823 case OT_LONG:
824 gen_helper_outl(v, n);
825 break;
a7812ae4 826 }
a7812ae4 827}
f115e911 828
b8b6a50b
FB
829static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
830 uint32_t svm_flags)
f115e911 831{
b8b6a50b
FB
832 int state_saved;
833 target_ulong next_eip;
834
835 state_saved = 0;
f115e911 836 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
773cdfcc 837 gen_update_cc_op(s);
14ce26e7 838 gen_jmp_im(cur_eip);
b8b6a50b 839 state_saved = 1;
b6abf97d 840 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 841 switch (ot) {
93ab25d7 842 case OT_BYTE:
4a7443be
BS
843 gen_helper_check_iob(cpu_env, cpu_tmp2_i32);
844 break;
93ab25d7 845 case OT_WORD:
4a7443be
BS
846 gen_helper_check_iow(cpu_env, cpu_tmp2_i32);
847 break;
93ab25d7 848 case OT_LONG:
4a7443be
BS
849 gen_helper_check_iol(cpu_env, cpu_tmp2_i32);
850 break;
a7812ae4 851 }
b8b6a50b 852 }
872929aa 853 if(s->flags & HF_SVMI_MASK) {
b8b6a50b 854 if (!state_saved) {
773cdfcc 855 gen_update_cc_op(s);
b8b6a50b 856 gen_jmp_im(cur_eip);
b8b6a50b
FB
857 }
858 svm_flags |= (1 << (4 + ot));
859 next_eip = s->pc - s->cs_base;
b6abf97d 860 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
052e80d5
BS
861 gen_helper_svm_check_io(cpu_env, cpu_tmp2_i32,
862 tcg_const_i32(svm_flags),
a7812ae4 863 tcg_const_i32(next_eip - cur_eip));
f115e911
FB
864 }
865}
866
2c0262af
FB
867static inline void gen_movs(DisasContext *s, int ot)
868{
869 gen_string_movl_A0_ESI(s);
57fec1fe 870 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 871 gen_string_movl_A0_EDI(s);
57fec1fe 872 gen_op_st_T0_A0(ot + s->mem_index);
6e0d8677
FB
873 gen_op_movl_T0_Dshift(ot);
874 gen_op_add_reg_T0(s->aflag, R_ESI);
875 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
876}
877
b6abf97d
FB
878static void gen_op_update1_cc(void)
879{
b6abf97d
FB
880 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
881}
882
883static void gen_op_update2_cc(void)
884{
885 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
886 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
887}
888
988c3eb0
RH
889static void gen_op_update3_cc(TCGv reg)
890{
891 tcg_gen_mov_tl(cpu_cc_src2, reg);
892 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
893 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
894}
895
b6abf97d
FB
896static inline void gen_op_testl_T0_T1_cc(void)
897{
b6abf97d
FB
898 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
899}
900
901static void gen_op_update_neg_cc(void)
902{
b6abf97d 903 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
a3251186
RH
904 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
905 tcg_gen_movi_tl(cpu_cc_srcT, 0);
b6abf97d
FB
906}
907
d229edce
RH
908/* compute all eflags to cc_src */
909static void gen_compute_eflags(DisasContext *s)
8e1c85e3 910{
988c3eb0 911 TCGv zero, dst, src1, src2;
db9f2597
RH
912 int live, dead;
913
d229edce
RH
914 if (s->cc_op == CC_OP_EFLAGS) {
915 return;
916 }
436ff2d2
RH
917 if (s->cc_op == CC_OP_CLR) {
918 tcg_gen_movi_tl(cpu_cc_src, CC_Z);
919 set_cc_op(s, CC_OP_EFLAGS);
920 return;
921 }
db9f2597
RH
922
923 TCGV_UNUSED(zero);
924 dst = cpu_cc_dst;
925 src1 = cpu_cc_src;
988c3eb0 926 src2 = cpu_cc_src2;
db9f2597
RH
927
928 /* Take care to not read values that are not live. */
929 live = cc_op_live[s->cc_op] & ~USES_CC_SRCT;
988c3eb0 930 dead = live ^ (USES_CC_DST | USES_CC_SRC | USES_CC_SRC2);
db9f2597
RH
931 if (dead) {
932 zero = tcg_const_tl(0);
933 if (dead & USES_CC_DST) {
934 dst = zero;
935 }
936 if (dead & USES_CC_SRC) {
937 src1 = zero;
938 }
988c3eb0
RH
939 if (dead & USES_CC_SRC2) {
940 src2 = zero;
941 }
db9f2597
RH
942 }
943
773cdfcc 944 gen_update_cc_op(s);
988c3eb0 945 gen_helper_cc_compute_all(cpu_cc_src, dst, src1, src2, cpu_cc_op);
d229edce 946 set_cc_op(s, CC_OP_EFLAGS);
db9f2597
RH
947
948 if (dead) {
949 tcg_temp_free(zero);
950 }
8e1c85e3
FB
951}
952
bec93d72
RH
953typedef struct CCPrepare {
954 TCGCond cond;
955 TCGv reg;
956 TCGv reg2;
957 target_ulong imm;
958 target_ulong mask;
959 bool use_reg2;
960 bool no_setcond;
961} CCPrepare;
962
06847f1f 963/* compute eflags.C to reg */
bec93d72 964static CCPrepare gen_prepare_eflags_c(DisasContext *s, TCGv reg)
06847f1f
RH
965{
966 TCGv t0, t1;
bec93d72 967 int size, shift;
06847f1f
RH
968
969 switch (s->cc_op) {
970 case CC_OP_SUBB ... CC_OP_SUBQ:
a3251186 971 /* (DATA_TYPE)CC_SRCT < (DATA_TYPE)CC_SRC */
06847f1f
RH
972 size = s->cc_op - CC_OP_SUBB;
973 t1 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
974 /* If no temporary was used, be careful not to alias t1 and t0. */
975 t0 = TCGV_EQUAL(t1, cpu_cc_src) ? cpu_tmp0 : reg;
a3251186 976 tcg_gen_mov_tl(t0, cpu_cc_srcT);
06847f1f
RH
977 gen_extu(size, t0);
978 goto add_sub;
979
980 case CC_OP_ADDB ... CC_OP_ADDQ:
981 /* (DATA_TYPE)CC_DST < (DATA_TYPE)CC_SRC */
982 size = s->cc_op - CC_OP_ADDB;
983 t1 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
984 t0 = gen_ext_tl(reg, cpu_cc_dst, size, false);
985 add_sub:
bec93d72
RH
986 return (CCPrepare) { .cond = TCG_COND_LTU, .reg = t0,
987 .reg2 = t1, .mask = -1, .use_reg2 = true };
06847f1f 988
06847f1f 989 case CC_OP_LOGICB ... CC_OP_LOGICQ:
436ff2d2 990 case CC_OP_CLR:
bec93d72 991 return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
06847f1f
RH
992
993 case CC_OP_INCB ... CC_OP_INCQ:
994 case CC_OP_DECB ... CC_OP_DECQ:
bec93d72
RH
995 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
996 .mask = -1, .no_setcond = true };
06847f1f
RH
997
998 case CC_OP_SHLB ... CC_OP_SHLQ:
999 /* (CC_SRC >> (DATA_BITS - 1)) & 1 */
1000 size = s->cc_op - CC_OP_SHLB;
bec93d72
RH
1001 shift = (8 << size) - 1;
1002 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1003 .mask = (target_ulong)1 << shift };
06847f1f
RH
1004
1005 case CC_OP_MULB ... CC_OP_MULQ:
bec93d72
RH
1006 return (CCPrepare) { .cond = TCG_COND_NE,
1007 .reg = cpu_cc_src, .mask = -1 };
06847f1f 1008
bc4b43dc
RH
1009 case CC_OP_BMILGB ... CC_OP_BMILGQ:
1010 size = s->cc_op - CC_OP_BMILGB;
1011 t0 = gen_ext_tl(reg, cpu_cc_src, size, false);
1012 return (CCPrepare) { .cond = TCG_COND_EQ, .reg = t0, .mask = -1 };
1013
cd7f97ca
RH
1014 case CC_OP_ADCX:
1015 case CC_OP_ADCOX:
1016 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_dst,
1017 .mask = -1, .no_setcond = true };
1018
06847f1f
RH
1019 case CC_OP_EFLAGS:
1020 case CC_OP_SARB ... CC_OP_SARQ:
1021 /* CC_SRC & 1 */
bec93d72
RH
1022 return (CCPrepare) { .cond = TCG_COND_NE,
1023 .reg = cpu_cc_src, .mask = CC_C };
06847f1f
RH
1024
1025 default:
1026 /* The need to compute only C from CC_OP_DYNAMIC is important
1027 in efficiently implementing e.g. INC at the start of a TB. */
1028 gen_update_cc_op(s);
988c3eb0
RH
1029 gen_helper_cc_compute_c(reg, cpu_cc_dst, cpu_cc_src,
1030 cpu_cc_src2, cpu_cc_op);
bec93d72
RH
1031 return (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
1032 .mask = -1, .no_setcond = true };
06847f1f
RH
1033 }
1034}
1035
1608ecca 1036/* compute eflags.P to reg */
bec93d72 1037static CCPrepare gen_prepare_eflags_p(DisasContext *s, TCGv reg)
1608ecca 1038{
d229edce 1039 gen_compute_eflags(s);
bec93d72
RH
1040 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1041 .mask = CC_P };
1608ecca
PB
1042}
1043
1044/* compute eflags.S to reg */
bec93d72 1045static CCPrepare gen_prepare_eflags_s(DisasContext *s, TCGv reg)
1608ecca 1046{
086c4077
RH
1047 switch (s->cc_op) {
1048 case CC_OP_DYNAMIC:
1049 gen_compute_eflags(s);
1050 /* FALLTHRU */
1051 case CC_OP_EFLAGS:
cd7f97ca
RH
1052 case CC_OP_ADCX:
1053 case CC_OP_ADOX:
1054 case CC_OP_ADCOX:
bec93d72
RH
1055 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1056 .mask = CC_S };
436ff2d2
RH
1057 case CC_OP_CLR:
1058 return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
086c4077
RH
1059 default:
1060 {
1061 int size = (s->cc_op - CC_OP_ADDB) & 3;
1062 TCGv t0 = gen_ext_tl(reg, cpu_cc_dst, size, true);
bec93d72 1063 return (CCPrepare) { .cond = TCG_COND_LT, .reg = t0, .mask = -1 };
086c4077 1064 }
086c4077 1065 }
1608ecca
PB
1066}
1067
1068/* compute eflags.O to reg */
bec93d72 1069static CCPrepare gen_prepare_eflags_o(DisasContext *s, TCGv reg)
1608ecca 1070{
cd7f97ca
RH
1071 switch (s->cc_op) {
1072 case CC_OP_ADOX:
1073 case CC_OP_ADCOX:
1074 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src2,
1075 .mask = -1, .no_setcond = true };
436ff2d2
RH
1076 case CC_OP_CLR:
1077 return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
cd7f97ca
RH
1078 default:
1079 gen_compute_eflags(s);
1080 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1081 .mask = CC_O };
1082 }
1608ecca
PB
1083}
1084
1085/* compute eflags.Z to reg */
bec93d72 1086static CCPrepare gen_prepare_eflags_z(DisasContext *s, TCGv reg)
1608ecca 1087{
086c4077
RH
1088 switch (s->cc_op) {
1089 case CC_OP_DYNAMIC:
1090 gen_compute_eflags(s);
1091 /* FALLTHRU */
1092 case CC_OP_EFLAGS:
cd7f97ca
RH
1093 case CC_OP_ADCX:
1094 case CC_OP_ADOX:
1095 case CC_OP_ADCOX:
bec93d72
RH
1096 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1097 .mask = CC_Z };
436ff2d2
RH
1098 case CC_OP_CLR:
1099 return (CCPrepare) { .cond = TCG_COND_ALWAYS, .mask = -1 };
086c4077
RH
1100 default:
1101 {
1102 int size = (s->cc_op - CC_OP_ADDB) & 3;
1103 TCGv t0 = gen_ext_tl(reg, cpu_cc_dst, size, false);
bec93d72 1104 return (CCPrepare) { .cond = TCG_COND_EQ, .reg = t0, .mask = -1 };
086c4077 1105 }
bec93d72
RH
1106 }
1107}
1108
c365395e
PB
1109/* perform a conditional store into register 'reg' according to jump opcode
1110 value 'b'. In the fast case, T0 is guaranted not to be used. */
276e6b5f 1111static CCPrepare gen_prepare_cc(DisasContext *s, int b, TCGv reg)
8e1c85e3 1112{
c365395e 1113 int inv, jcc_op, size, cond;
276e6b5f 1114 CCPrepare cc;
c365395e
PB
1115 TCGv t0;
1116
1117 inv = b & 1;
8e1c85e3 1118 jcc_op = (b >> 1) & 7;
c365395e
PB
1119
1120 switch (s->cc_op) {
69d1aa31
RH
1121 case CC_OP_SUBB ... CC_OP_SUBQ:
1122 /* We optimize relational operators for the cmp/jcc case. */
c365395e
PB
1123 size = s->cc_op - CC_OP_SUBB;
1124 switch (jcc_op) {
1125 case JCC_BE:
a3251186 1126 tcg_gen_mov_tl(cpu_tmp4, cpu_cc_srcT);
c365395e
PB
1127 gen_extu(size, cpu_tmp4);
1128 t0 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
276e6b5f
RH
1129 cc = (CCPrepare) { .cond = TCG_COND_LEU, .reg = cpu_tmp4,
1130 .reg2 = t0, .mask = -1, .use_reg2 = true };
c365395e 1131 break;
8e1c85e3 1132
c365395e 1133 case JCC_L:
276e6b5f 1134 cond = TCG_COND_LT;
c365395e
PB
1135 goto fast_jcc_l;
1136 case JCC_LE:
276e6b5f 1137 cond = TCG_COND_LE;
c365395e 1138 fast_jcc_l:
a3251186 1139 tcg_gen_mov_tl(cpu_tmp4, cpu_cc_srcT);
c365395e
PB
1140 gen_exts(size, cpu_tmp4);
1141 t0 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, true);
276e6b5f
RH
1142 cc = (CCPrepare) { .cond = cond, .reg = cpu_tmp4,
1143 .reg2 = t0, .mask = -1, .use_reg2 = true };
c365395e 1144 break;
8e1c85e3 1145
c365395e 1146 default:
8e1c85e3 1147 goto slow_jcc;
c365395e 1148 }
8e1c85e3 1149 break;
c365395e 1150
8e1c85e3
FB
1151 default:
1152 slow_jcc:
69d1aa31
RH
1153 /* This actually generates good code for JC, JZ and JS. */
1154 switch (jcc_op) {
1155 case JCC_O:
1156 cc = gen_prepare_eflags_o(s, reg);
1157 break;
1158 case JCC_B:
1159 cc = gen_prepare_eflags_c(s, reg);
1160 break;
1161 case JCC_Z:
1162 cc = gen_prepare_eflags_z(s, reg);
1163 break;
1164 case JCC_BE:
1165 gen_compute_eflags(s);
1166 cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1167 .mask = CC_Z | CC_C };
1168 break;
1169 case JCC_S:
1170 cc = gen_prepare_eflags_s(s, reg);
1171 break;
1172 case JCC_P:
1173 cc = gen_prepare_eflags_p(s, reg);
1174 break;
1175 case JCC_L:
1176 gen_compute_eflags(s);
1177 if (TCGV_EQUAL(reg, cpu_cc_src)) {
1178 reg = cpu_tmp0;
1179 }
1180 tcg_gen_shri_tl(reg, cpu_cc_src, 4); /* CC_O -> CC_S */
1181 tcg_gen_xor_tl(reg, reg, cpu_cc_src);
1182 cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
1183 .mask = CC_S };
1184 break;
1185 default:
1186 case JCC_LE:
1187 gen_compute_eflags(s);
1188 if (TCGV_EQUAL(reg, cpu_cc_src)) {
1189 reg = cpu_tmp0;
1190 }
1191 tcg_gen_shri_tl(reg, cpu_cc_src, 4); /* CC_O -> CC_S */
1192 tcg_gen_xor_tl(reg, reg, cpu_cc_src);
1193 cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
1194 .mask = CC_S | CC_Z };
1195 break;
1196 }
c365395e 1197 break;
8e1c85e3 1198 }
276e6b5f
RH
1199
1200 if (inv) {
1201 cc.cond = tcg_invert_cond(cc.cond);
1202 }
1203 return cc;
8e1c85e3
FB
1204}
1205
cc8b6f5b
PB
1206static void gen_setcc1(DisasContext *s, int b, TCGv reg)
1207{
1208 CCPrepare cc = gen_prepare_cc(s, b, reg);
1209
1210 if (cc.no_setcond) {
1211 if (cc.cond == TCG_COND_EQ) {
1212 tcg_gen_xori_tl(reg, cc.reg, 1);
1213 } else {
1214 tcg_gen_mov_tl(reg, cc.reg);
1215 }
1216 return;
1217 }
1218
1219 if (cc.cond == TCG_COND_NE && !cc.use_reg2 && cc.imm == 0 &&
1220 cc.mask != 0 && (cc.mask & (cc.mask - 1)) == 0) {
1221 tcg_gen_shri_tl(reg, cc.reg, ctztl(cc.mask));
1222 tcg_gen_andi_tl(reg, reg, 1);
1223 return;
1224 }
1225 if (cc.mask != -1) {
1226 tcg_gen_andi_tl(reg, cc.reg, cc.mask);
1227 cc.reg = reg;
1228 }
1229 if (cc.use_reg2) {
1230 tcg_gen_setcond_tl(cc.cond, reg, cc.reg, cc.reg2);
1231 } else {
1232 tcg_gen_setcondi_tl(cc.cond, reg, cc.reg, cc.imm);
1233 }
1234}
1235
1236static inline void gen_compute_eflags_c(DisasContext *s, TCGv reg)
1237{
1238 gen_setcc1(s, JCC_B << 1, reg);
1239}
276e6b5f 1240
8e1c85e3
FB
1241/* generate a conditional jump to label 'l1' according to jump opcode
1242 value 'b'. In the fast case, T0 is guaranted not to be used. */
dc259201
RH
1243static inline void gen_jcc1_noeob(DisasContext *s, int b, int l1)
1244{
1245 CCPrepare cc = gen_prepare_cc(s, b, cpu_T[0]);
1246
1247 if (cc.mask != -1) {
1248 tcg_gen_andi_tl(cpu_T[0], cc.reg, cc.mask);
1249 cc.reg = cpu_T[0];
1250 }
1251 if (cc.use_reg2) {
1252 tcg_gen_brcond_tl(cc.cond, cc.reg, cc.reg2, l1);
1253 } else {
1254 tcg_gen_brcondi_tl(cc.cond, cc.reg, cc.imm, l1);
1255 }
1256}
1257
1258/* Generate a conditional jump to label 'l1' according to jump opcode
1259 value 'b'. In the fast case, T0 is guaranted not to be used.
1260 A translation block must end soon. */
b27fc131 1261static inline void gen_jcc1(DisasContext *s, int b, int l1)
8e1c85e3 1262{
943131ca 1263 CCPrepare cc = gen_prepare_cc(s, b, cpu_T[0]);
8e1c85e3 1264
dc259201 1265 gen_update_cc_op(s);
943131ca
PB
1266 if (cc.mask != -1) {
1267 tcg_gen_andi_tl(cpu_T[0], cc.reg, cc.mask);
1268 cc.reg = cpu_T[0];
1269 }
dc259201 1270 set_cc_op(s, CC_OP_DYNAMIC);
943131ca
PB
1271 if (cc.use_reg2) {
1272 tcg_gen_brcond_tl(cc.cond, cc.reg, cc.reg2, l1);
1273 } else {
1274 tcg_gen_brcondi_tl(cc.cond, cc.reg, cc.imm, l1);
8e1c85e3
FB
1275 }
1276}
1277
14ce26e7
FB
1278/* XXX: does not work with gdbstub "ice" single step - not a
1279 serious problem */
1280static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
2c0262af 1281{
14ce26e7
FB
1282 int l1, l2;
1283
1284 l1 = gen_new_label();
1285 l2 = gen_new_label();
6e0d8677 1286 gen_op_jnz_ecx(s->aflag, l1);
14ce26e7
FB
1287 gen_set_label(l2);
1288 gen_jmp_tb(s, next_eip, 1);
1289 gen_set_label(l1);
1290 return l2;
2c0262af
FB
1291}
1292
1293static inline void gen_stos(DisasContext *s, int ot)
1294{
57fec1fe 1295 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
2c0262af 1296 gen_string_movl_A0_EDI(s);
57fec1fe 1297 gen_op_st_T0_A0(ot + s->mem_index);
6e0d8677
FB
1298 gen_op_movl_T0_Dshift(ot);
1299 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
1300}
1301
1302static inline void gen_lods(DisasContext *s, int ot)
1303{
1304 gen_string_movl_A0_ESI(s);
57fec1fe
FB
1305 gen_op_ld_T0_A0(ot + s->mem_index);
1306 gen_op_mov_reg_T0(ot, R_EAX);
6e0d8677
FB
1307 gen_op_movl_T0_Dshift(ot);
1308 gen_op_add_reg_T0(s->aflag, R_ESI);
2c0262af
FB
1309}
1310
1311static inline void gen_scas(DisasContext *s, int ot)
1312{
2c0262af 1313 gen_string_movl_A0_EDI(s);
57fec1fe 1314 gen_op_ld_T1_A0(ot + s->mem_index);
63633fe6 1315 gen_op(s, OP_CMPL, ot, R_EAX);
6e0d8677
FB
1316 gen_op_movl_T0_Dshift(ot);
1317 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
1318}
1319
1320static inline void gen_cmps(DisasContext *s, int ot)
1321{
2c0262af 1322 gen_string_movl_A0_EDI(s);
57fec1fe 1323 gen_op_ld_T1_A0(ot + s->mem_index);
63633fe6
RH
1324 gen_string_movl_A0_ESI(s);
1325 gen_op(s, OP_CMPL, ot, OR_TMP0);
6e0d8677
FB
1326 gen_op_movl_T0_Dshift(ot);
1327 gen_op_add_reg_T0(s->aflag, R_ESI);
1328 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
1329}
1330
1331static inline void gen_ins(DisasContext *s, int ot)
1332{
2e70f6ef
PB
1333 if (use_icount)
1334 gen_io_start();
2c0262af 1335 gen_string_movl_A0_EDI(s);
6e0d8677
FB
1336 /* Note: we must do this dummy write first to be restartable in
1337 case of page fault. */
9772c73b 1338 gen_op_movl_T0_0();
57fec1fe 1339 gen_op_st_T0_A0(ot + s->mem_index);
b8b6a50b 1340 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
b6abf97d
FB
1341 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1342 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
a7812ae4 1343 gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
57fec1fe 1344 gen_op_st_T0_A0(ot + s->mem_index);
6e0d8677
FB
1345 gen_op_movl_T0_Dshift(ot);
1346 gen_op_add_reg_T0(s->aflag, R_EDI);
2e70f6ef
PB
1347 if (use_icount)
1348 gen_io_end();
2c0262af
FB
1349}
1350
1351static inline void gen_outs(DisasContext *s, int ot)
1352{
2e70f6ef
PB
1353 if (use_icount)
1354 gen_io_start();
2c0262af 1355 gen_string_movl_A0_ESI(s);
57fec1fe 1356 gen_op_ld_T0_A0(ot + s->mem_index);
b8b6a50b
FB
1357
1358 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
b6abf97d
FB
1359 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1360 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1361 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
a7812ae4 1362 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
b8b6a50b 1363
6e0d8677
FB
1364 gen_op_movl_T0_Dshift(ot);
1365 gen_op_add_reg_T0(s->aflag, R_ESI);
2e70f6ef
PB
1366 if (use_icount)
1367 gen_io_end();
2c0262af
FB
1368}
1369
1370/* same method as Valgrind : we generate jumps to current or next
1371 instruction */
1372#define GEN_REPZ(op) \
1373static inline void gen_repz_ ## op(DisasContext *s, int ot, \
14ce26e7 1374 target_ulong cur_eip, target_ulong next_eip) \
2c0262af 1375{ \
14ce26e7 1376 int l2;\
2c0262af 1377 gen_update_cc_op(s); \
14ce26e7 1378 l2 = gen_jz_ecx_string(s, next_eip); \
2c0262af 1379 gen_ ## op(s, ot); \
6e0d8677 1380 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
2c0262af
FB
1381 /* a loop would cause two single step exceptions if ECX = 1 \
1382 before rep string_insn */ \
1383 if (!s->jmp_opt) \
6e0d8677 1384 gen_op_jz_ecx(s->aflag, l2); \
2c0262af
FB
1385 gen_jmp(s, cur_eip); \
1386}
1387
1388#define GEN_REPZ2(op) \
1389static inline void gen_repz_ ## op(DisasContext *s, int ot, \
14ce26e7
FB
1390 target_ulong cur_eip, \
1391 target_ulong next_eip, \
2c0262af
FB
1392 int nz) \
1393{ \
14ce26e7 1394 int l2;\
2c0262af 1395 gen_update_cc_op(s); \
14ce26e7 1396 l2 = gen_jz_ecx_string(s, next_eip); \
2c0262af 1397 gen_ ## op(s, ot); \
6e0d8677 1398 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
773cdfcc 1399 gen_update_cc_op(s); \
b27fc131 1400 gen_jcc1(s, (JCC_Z << 1) | (nz ^ 1), l2); \
2c0262af 1401 if (!s->jmp_opt) \
6e0d8677 1402 gen_op_jz_ecx(s->aflag, l2); \
2c0262af
FB
1403 gen_jmp(s, cur_eip); \
1404}
1405
1406GEN_REPZ(movs)
1407GEN_REPZ(stos)
1408GEN_REPZ(lods)
1409GEN_REPZ(ins)
1410GEN_REPZ(outs)
1411GEN_REPZ2(scas)
1412GEN_REPZ2(cmps)
1413
a7812ae4
PB
1414static void gen_helper_fp_arith_ST0_FT0(int op)
1415{
1416 switch (op) {
d3eb5eae
BS
1417 case 0:
1418 gen_helper_fadd_ST0_FT0(cpu_env);
1419 break;
1420 case 1:
1421 gen_helper_fmul_ST0_FT0(cpu_env);
1422 break;
1423 case 2:
1424 gen_helper_fcom_ST0_FT0(cpu_env);
1425 break;
1426 case 3:
1427 gen_helper_fcom_ST0_FT0(cpu_env);
1428 break;
1429 case 4:
1430 gen_helper_fsub_ST0_FT0(cpu_env);
1431 break;
1432 case 5:
1433 gen_helper_fsubr_ST0_FT0(cpu_env);
1434 break;
1435 case 6:
1436 gen_helper_fdiv_ST0_FT0(cpu_env);
1437 break;
1438 case 7:
1439 gen_helper_fdivr_ST0_FT0(cpu_env);
1440 break;
a7812ae4
PB
1441 }
1442}
2c0262af
FB
1443
1444/* NOTE the exception in "r" op ordering */
a7812ae4
PB
1445static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
1446{
1447 TCGv_i32 tmp = tcg_const_i32(opreg);
1448 switch (op) {
d3eb5eae
BS
1449 case 0:
1450 gen_helper_fadd_STN_ST0(cpu_env, tmp);
1451 break;
1452 case 1:
1453 gen_helper_fmul_STN_ST0(cpu_env, tmp);
1454 break;
1455 case 4:
1456 gen_helper_fsubr_STN_ST0(cpu_env, tmp);
1457 break;
1458 case 5:
1459 gen_helper_fsub_STN_ST0(cpu_env, tmp);
1460 break;
1461 case 6:
1462 gen_helper_fdivr_STN_ST0(cpu_env, tmp);
1463 break;
1464 case 7:
1465 gen_helper_fdiv_STN_ST0(cpu_env, tmp);
1466 break;
a7812ae4
PB
1467 }
1468}
2c0262af
FB
1469
1470/* if d == OR_TMP0, it means memory operand (address in A0) */
1471static void gen_op(DisasContext *s1, int op, int ot, int d)
1472{
2c0262af 1473 if (d != OR_TMP0) {
57fec1fe 1474 gen_op_mov_TN_reg(ot, 0, d);
2c0262af 1475 } else {
57fec1fe 1476 gen_op_ld_T0_A0(ot + s1->mem_index);
2c0262af
FB
1477 }
1478 switch(op) {
1479 case OP_ADCL:
cc8b6f5b 1480 gen_compute_eflags_c(s1, cpu_tmp4);
cad3a37d
FB
1481 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1482 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1483 if (d != OR_TMP0)
1484 gen_op_mov_reg_T0(ot, d);
1485 else
1486 gen_op_st_T0_A0(ot + s1->mem_index);
988c3eb0
RH
1487 gen_op_update3_cc(cpu_tmp4);
1488 set_cc_op(s1, CC_OP_ADCB + ot);
cad3a37d 1489 break;
2c0262af 1490 case OP_SBBL:
cc8b6f5b 1491 gen_compute_eflags_c(s1, cpu_tmp4);
cad3a37d
FB
1492 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1493 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1494 if (d != OR_TMP0)
57fec1fe 1495 gen_op_mov_reg_T0(ot, d);
cad3a37d
FB
1496 else
1497 gen_op_st_T0_A0(ot + s1->mem_index);
988c3eb0
RH
1498 gen_op_update3_cc(cpu_tmp4);
1499 set_cc_op(s1, CC_OP_SBBB + ot);
cad3a37d 1500 break;
2c0262af
FB
1501 case OP_ADDL:
1502 gen_op_addl_T0_T1();
cad3a37d
FB
1503 if (d != OR_TMP0)
1504 gen_op_mov_reg_T0(ot, d);
1505 else
1506 gen_op_st_T0_A0(ot + s1->mem_index);
1507 gen_op_update2_cc();
3ca51d07 1508 set_cc_op(s1, CC_OP_ADDB + ot);
2c0262af
FB
1509 break;
1510 case OP_SUBL:
a3251186 1511 tcg_gen_mov_tl(cpu_cc_srcT, cpu_T[0]);
57fec1fe 1512 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1513 if (d != OR_TMP0)
1514 gen_op_mov_reg_T0(ot, d);
1515 else
1516 gen_op_st_T0_A0(ot + s1->mem_index);
1517 gen_op_update2_cc();
3ca51d07 1518 set_cc_op(s1, CC_OP_SUBB + ot);
2c0262af
FB
1519 break;
1520 default:
1521 case OP_ANDL:
57fec1fe 1522 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1523 if (d != OR_TMP0)
1524 gen_op_mov_reg_T0(ot, d);
1525 else
1526 gen_op_st_T0_A0(ot + s1->mem_index);
1527 gen_op_update1_cc();
3ca51d07 1528 set_cc_op(s1, CC_OP_LOGICB + ot);
57fec1fe 1529 break;
2c0262af 1530 case OP_ORL:
57fec1fe 1531 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1532 if (d != OR_TMP0)
1533 gen_op_mov_reg_T0(ot, d);
1534 else
1535 gen_op_st_T0_A0(ot + s1->mem_index);
1536 gen_op_update1_cc();
3ca51d07 1537 set_cc_op(s1, CC_OP_LOGICB + ot);
57fec1fe 1538 break;
2c0262af 1539 case OP_XORL:
57fec1fe 1540 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1541 if (d != OR_TMP0)
1542 gen_op_mov_reg_T0(ot, d);
1543 else
1544 gen_op_st_T0_A0(ot + s1->mem_index);
1545 gen_op_update1_cc();
3ca51d07 1546 set_cc_op(s1, CC_OP_LOGICB + ot);
2c0262af
FB
1547 break;
1548 case OP_CMPL:
63633fe6 1549 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
a3251186 1550 tcg_gen_mov_tl(cpu_cc_srcT, cpu_T[0]);
63633fe6 1551 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
3ca51d07 1552 set_cc_op(s1, CC_OP_SUBB + ot);
2c0262af
FB
1553 break;
1554 }
b6abf97d
FB
1555}
1556
2c0262af
FB
1557/* if d == OR_TMP0, it means memory operand (address in A0) */
1558static void gen_inc(DisasContext *s1, int ot, int d, int c)
1559{
1560 if (d != OR_TMP0)
57fec1fe 1561 gen_op_mov_TN_reg(ot, 0, d);
2c0262af 1562 else
57fec1fe 1563 gen_op_ld_T0_A0(ot + s1->mem_index);
cc8b6f5b 1564 gen_compute_eflags_c(s1, cpu_cc_src);
2c0262af 1565 if (c > 0) {
b6abf97d 1566 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
3ca51d07 1567 set_cc_op(s1, CC_OP_INCB + ot);
2c0262af 1568 } else {
b6abf97d 1569 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
3ca51d07 1570 set_cc_op(s1, CC_OP_DECB + ot);
2c0262af
FB
1571 }
1572 if (d != OR_TMP0)
57fec1fe 1573 gen_op_mov_reg_T0(ot, d);
2c0262af 1574 else
57fec1fe 1575 gen_op_st_T0_A0(ot + s1->mem_index);
cd31fefa 1576 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2c0262af
FB
1577}
1578
f437d0a3
RH
1579static void gen_shift_flags(DisasContext *s, int ot, TCGv result, TCGv shm1,
1580 TCGv count, bool is_right)
1581{
1582 TCGv_i32 z32, s32, oldop;
1583 TCGv z_tl;
1584
1585 /* Store the results into the CC variables. If we know that the
1586 variable must be dead, store unconditionally. Otherwise we'll
1587 need to not disrupt the current contents. */
1588 z_tl = tcg_const_tl(0);
1589 if (cc_op_live[s->cc_op] & USES_CC_DST) {
1590 tcg_gen_movcond_tl(TCG_COND_NE, cpu_cc_dst, count, z_tl,
1591 result, cpu_cc_dst);
1592 } else {
1593 tcg_gen_mov_tl(cpu_cc_dst, result);
1594 }
1595 if (cc_op_live[s->cc_op] & USES_CC_SRC) {
1596 tcg_gen_movcond_tl(TCG_COND_NE, cpu_cc_src, count, z_tl,
1597 shm1, cpu_cc_src);
1598 } else {
1599 tcg_gen_mov_tl(cpu_cc_src, shm1);
1600 }
1601 tcg_temp_free(z_tl);
1602
1603 /* Get the two potential CC_OP values into temporaries. */
1604 tcg_gen_movi_i32(cpu_tmp2_i32, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
1605 if (s->cc_op == CC_OP_DYNAMIC) {
1606 oldop = cpu_cc_op;
1607 } else {
1608 tcg_gen_movi_i32(cpu_tmp3_i32, s->cc_op);
1609 oldop = cpu_tmp3_i32;
1610 }
1611
1612 /* Conditionally store the CC_OP value. */
1613 z32 = tcg_const_i32(0);
1614 s32 = tcg_temp_new_i32();
1615 tcg_gen_trunc_tl_i32(s32, count);
1616 tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, s32, z32, cpu_tmp2_i32, oldop);
1617 tcg_temp_free_i32(z32);
1618 tcg_temp_free_i32(s32);
1619
1620 /* The CC_OP value is no longer predictable. */
1621 set_cc_op(s, CC_OP_DYNAMIC);
1622}
1623
b6abf97d
FB
1624static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1625 int is_right, int is_arith)
2c0262af 1626{
a41f62f5 1627 target_ulong mask = (ot == OT_QUAD ? 0x3f : 0x1f);
3b46e624 1628
b6abf97d 1629 /* load */
82786041 1630 if (op1 == OR_TMP0) {
b6abf97d 1631 gen_op_ld_T0_A0(ot + s->mem_index);
82786041 1632 } else {
b6abf97d 1633 gen_op_mov_TN_reg(ot, 0, op1);
82786041 1634 }
b6abf97d 1635
a41f62f5
RH
1636 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1637 tcg_gen_subi_tl(cpu_tmp0, cpu_T[1], 1);
b6abf97d
FB
1638
1639 if (is_right) {
1640 if (is_arith) {
f484d386 1641 gen_exts(ot, cpu_T[0]);
a41f62f5
RH
1642 tcg_gen_sar_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1643 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
b6abf97d 1644 } else {
cad3a37d 1645 gen_extu(ot, cpu_T[0]);
a41f62f5
RH
1646 tcg_gen_shr_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1647 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
b6abf97d
FB
1648 }
1649 } else {
a41f62f5
RH
1650 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1651 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
b6abf97d
FB
1652 }
1653
1654 /* store */
82786041 1655 if (op1 == OR_TMP0) {
b6abf97d 1656 gen_op_st_T0_A0(ot + s->mem_index);
82786041 1657 } else {
b6abf97d 1658 gen_op_mov_reg_T0(ot, op1);
82786041
RH
1659 }
1660
f437d0a3 1661 gen_shift_flags(s, ot, cpu_T[0], cpu_tmp0, cpu_T[1], is_right);
b6abf97d
FB
1662}
1663
c1c37968
FB
1664static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1665 int is_right, int is_arith)
1666{
a41f62f5 1667 int mask = (ot == OT_QUAD ? 0x3f : 0x1f);
c1c37968
FB
1668
1669 /* load */
1670 if (op1 == OR_TMP0)
1671 gen_op_ld_T0_A0(ot + s->mem_index);
1672 else
1673 gen_op_mov_TN_reg(ot, 0, op1);
1674
1675 op2 &= mask;
1676 if (op2 != 0) {
1677 if (is_right) {
1678 if (is_arith) {
1679 gen_exts(ot, cpu_T[0]);
2a449d14 1680 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
c1c37968
FB
1681 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1682 } else {
1683 gen_extu(ot, cpu_T[0]);
2a449d14 1684 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
c1c37968
FB
1685 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1686 }
1687 } else {
2a449d14 1688 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
c1c37968
FB
1689 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1690 }
1691 }
1692
1693 /* store */
1694 if (op1 == OR_TMP0)
1695 gen_op_st_T0_A0(ot + s->mem_index);
1696 else
1697 gen_op_mov_reg_T0(ot, op1);
1698
1699 /* update eflags if non zero shift */
1700 if (op2 != 0) {
2a449d14 1701 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
c1c37968 1702 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3ca51d07 1703 set_cc_op(s, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
c1c37968
FB
1704 }
1705}
1706
b6abf97d
FB
1707static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1708{
1709 if (arg2 >= 0)
1710 tcg_gen_shli_tl(ret, arg1, arg2);
1711 else
1712 tcg_gen_shri_tl(ret, arg1, -arg2);
1713}
1714
34d80a55 1715static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, int is_right)
b6abf97d 1716{
34d80a55
RH
1717 target_ulong mask = (ot == OT_QUAD ? 0x3f : 0x1f);
1718 TCGv_i32 t0, t1;
b6abf97d
FB
1719
1720 /* load */
1e4840bf 1721 if (op1 == OR_TMP0) {
34d80a55 1722 gen_op_ld_T0_A0(ot + s->mem_index);
1e4840bf 1723 } else {
34d80a55 1724 gen_op_mov_TN_reg(ot, 0, op1);
1e4840bf 1725 }
b6abf97d 1726
34d80a55 1727 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
b6abf97d 1728
34d80a55
RH
1729 switch (ot) {
1730 case OT_BYTE:
1731 /* Replicate the 8-bit input so that a 32-bit rotate works. */
1732 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
1733 tcg_gen_muli_tl(cpu_T[0], cpu_T[0], 0x01010101);
1734 goto do_long;
1735 case OT_WORD:
1736 /* Replicate the 16-bit input so that a 32-bit rotate works. */
1737 tcg_gen_deposit_tl(cpu_T[0], cpu_T[0], cpu_T[0], 16, 16);
1738 goto do_long;
1739 do_long:
1740#ifdef TARGET_X86_64
1741 case OT_LONG:
1742 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1743 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
1744 if (is_right) {
1745 tcg_gen_rotr_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
1746 } else {
1747 tcg_gen_rotl_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
1748 }
1749 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
1750 break;
1751#endif
1752 default:
1753 if (is_right) {
1754 tcg_gen_rotr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1755 } else {
1756 tcg_gen_rotl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1757 }
1758 break;
b6abf97d 1759 }
b6abf97d 1760
b6abf97d 1761 /* store */
1e4840bf 1762 if (op1 == OR_TMP0) {
34d80a55 1763 gen_op_st_T0_A0(ot + s->mem_index);
1e4840bf 1764 } else {
34d80a55 1765 gen_op_mov_reg_T0(ot, op1);
1e4840bf 1766 }
b6abf97d 1767
34d80a55
RH
1768 /* We'll need the flags computed into CC_SRC. */
1769 gen_compute_eflags(s);
b6abf97d 1770
34d80a55
RH
1771 /* The value that was "rotated out" is now present at the other end
1772 of the word. Compute C into CC_DST and O into CC_SRC2. Note that
1773 since we've computed the flags into CC_SRC, these variables are
1774 currently dead. */
b6abf97d 1775 if (is_right) {
34d80a55
RH
1776 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask - 1);
1777 tcg_gen_shri_tl(cpu_cc_dst, cpu_T[0], mask);
089305ac 1778 tcg_gen_andi_tl(cpu_cc_dst, cpu_cc_dst, 1);
34d80a55
RH
1779 } else {
1780 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask);
1781 tcg_gen_andi_tl(cpu_cc_dst, cpu_T[0], 1);
b6abf97d 1782 }
34d80a55
RH
1783 tcg_gen_andi_tl(cpu_cc_src2, cpu_cc_src2, 1);
1784 tcg_gen_xor_tl(cpu_cc_src2, cpu_cc_src2, cpu_cc_dst);
1785
1786 /* Now conditionally store the new CC_OP value. If the shift count
1787 is 0 we keep the CC_OP_EFLAGS setting so that only CC_SRC is live.
1788 Otherwise reuse CC_OP_ADCOX which have the C and O flags split out
1789 exactly as we computed above. */
1790 t0 = tcg_const_i32(0);
1791 t1 = tcg_temp_new_i32();
1792 tcg_gen_trunc_tl_i32(t1, cpu_T[1]);
1793 tcg_gen_movi_i32(cpu_tmp2_i32, CC_OP_ADCOX);
1794 tcg_gen_movi_i32(cpu_tmp3_i32, CC_OP_EFLAGS);
1795 tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, t1, t0,
1796 cpu_tmp2_i32, cpu_tmp3_i32);
1797 tcg_temp_free_i32(t0);
1798 tcg_temp_free_i32(t1);
1799
1800 /* The CC_OP value is no longer predictable. */
1801 set_cc_op(s, CC_OP_DYNAMIC);
b6abf97d
FB
1802}
1803
8cd6345d 1804static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
1805 int is_right)
1806{
34d80a55
RH
1807 int mask = (ot == OT_QUAD ? 0x3f : 0x1f);
1808 int shift;
8cd6345d 1809
1810 /* load */
1811 if (op1 == OR_TMP0) {
34d80a55 1812 gen_op_ld_T0_A0(ot + s->mem_index);
8cd6345d 1813 } else {
34d80a55 1814 gen_op_mov_TN_reg(ot, 0, op1);
8cd6345d 1815 }
1816
8cd6345d 1817 op2 &= mask;
8cd6345d 1818 if (op2 != 0) {
34d80a55
RH
1819 switch (ot) {
1820#ifdef TARGET_X86_64
1821 case OT_LONG:
1822 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1823 if (is_right) {
1824 tcg_gen_rotri_i32(cpu_tmp2_i32, cpu_tmp2_i32, op2);
1825 } else {
1826 tcg_gen_rotli_i32(cpu_tmp2_i32, cpu_tmp2_i32, op2);
1827 }
1828 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
1829 break;
1830#endif
1831 default:
1832 if (is_right) {
1833 tcg_gen_rotri_tl(cpu_T[0], cpu_T[0], op2);
1834 } else {
1835 tcg_gen_rotli_tl(cpu_T[0], cpu_T[0], op2);
1836 }
1837 break;
1838 case OT_BYTE:
1839 mask = 7;
1840 goto do_shifts;
1841 case OT_WORD:
1842 mask = 15;
1843 do_shifts:
1844 shift = op2 & mask;
1845 if (is_right) {
1846 shift = mask + 1 - shift;
1847 }
1848 gen_extu(ot, cpu_T[0]);
1849 tcg_gen_shli_tl(cpu_tmp0, cpu_T[0], shift);
1850 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], mask + 1 - shift);
1851 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1852 break;
8cd6345d 1853 }
8cd6345d 1854 }
1855
1856 /* store */
1857 if (op1 == OR_TMP0) {
34d80a55 1858 gen_op_st_T0_A0(ot + s->mem_index);
8cd6345d 1859 } else {
34d80a55 1860 gen_op_mov_reg_T0(ot, op1);
8cd6345d 1861 }
1862
1863 if (op2 != 0) {
34d80a55 1864 /* Compute the flags into CC_SRC. */
d229edce 1865 gen_compute_eflags(s);
0ff6addd 1866
34d80a55
RH
1867 /* The value that was "rotated out" is now present at the other end
1868 of the word. Compute C into CC_DST and O into CC_SRC2. Note that
1869 since we've computed the flags into CC_SRC, these variables are
1870 currently dead. */
8cd6345d 1871 if (is_right) {
34d80a55
RH
1872 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask - 1);
1873 tcg_gen_shri_tl(cpu_cc_dst, cpu_T[0], mask);
1874 } else {
1875 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask);
1876 tcg_gen_andi_tl(cpu_cc_dst, cpu_T[0], 1);
8cd6345d 1877 }
34d80a55
RH
1878 tcg_gen_andi_tl(cpu_cc_src2, cpu_cc_src2, 1);
1879 tcg_gen_xor_tl(cpu_cc_src2, cpu_cc_src2, cpu_cc_dst);
1880 set_cc_op(s, CC_OP_ADCOX);
8cd6345d 1881 }
8cd6345d 1882}
1883
b6abf97d
FB
1884/* XXX: add faster immediate = 1 case */
1885static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1886 int is_right)
1887{
d229edce 1888 gen_compute_eflags(s);
c7b3c873 1889 assert(s->cc_op == CC_OP_EFLAGS);
b6abf97d
FB
1890
1891 /* load */
1892 if (op1 == OR_TMP0)
1893 gen_op_ld_T0_A0(ot + s->mem_index);
1894 else
1895 gen_op_mov_TN_reg(ot, 0, op1);
1896
a7812ae4
PB
1897 if (is_right) {
1898 switch (ot) {
93ab25d7 1899 case OT_BYTE:
7923057b
BS
1900 gen_helper_rcrb(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1901 break;
93ab25d7 1902 case OT_WORD:
7923057b
BS
1903 gen_helper_rcrw(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1904 break;
93ab25d7 1905 case OT_LONG:
7923057b
BS
1906 gen_helper_rcrl(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1907 break;
a7812ae4 1908#ifdef TARGET_X86_64
93ab25d7 1909 case OT_QUAD:
7923057b
BS
1910 gen_helper_rcrq(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1911 break;
a7812ae4
PB
1912#endif
1913 }
1914 } else {
1915 switch (ot) {
93ab25d7 1916 case OT_BYTE:
7923057b
BS
1917 gen_helper_rclb(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1918 break;
93ab25d7 1919 case OT_WORD:
7923057b
BS
1920 gen_helper_rclw(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1921 break;
93ab25d7 1922 case OT_LONG:
7923057b
BS
1923 gen_helper_rcll(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1924 break;
a7812ae4 1925#ifdef TARGET_X86_64
93ab25d7 1926 case OT_QUAD:
7923057b
BS
1927 gen_helper_rclq(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1928 break;
a7812ae4
PB
1929#endif
1930 }
1931 }
b6abf97d
FB
1932 /* store */
1933 if (op1 == OR_TMP0)
1934 gen_op_st_T0_A0(ot + s->mem_index);
1935 else
1936 gen_op_mov_reg_T0(ot, op1);
b6abf97d
FB
1937}
1938
1939/* XXX: add faster immediate case */
3b9d3cf1 1940static void gen_shiftd_rm_T1(DisasContext *s, int ot, int op1,
f437d0a3 1941 bool is_right, TCGv count_in)
b6abf97d 1942{
f437d0a3
RH
1943 target_ulong mask = (ot == OT_QUAD ? 63 : 31);
1944 TCGv count;
b6abf97d
FB
1945
1946 /* load */
1e4840bf 1947 if (op1 == OR_TMP0) {
f437d0a3 1948 gen_op_ld_T0_A0(ot + s->mem_index);
1e4840bf 1949 } else {
f437d0a3 1950 gen_op_mov_TN_reg(ot, 0, op1);
1e4840bf 1951 }
b6abf97d 1952
f437d0a3
RH
1953 count = tcg_temp_new();
1954 tcg_gen_andi_tl(count, count_in, mask);
1e4840bf 1955
f437d0a3
RH
1956 switch (ot) {
1957 case OT_WORD:
1958 /* Note: we implement the Intel behaviour for shift count > 16.
1959 This means "shrdw C, B, A" shifts A:B:A >> C. Build the B:A
1960 portion by constructing it as a 32-bit value. */
b6abf97d 1961 if (is_right) {
f437d0a3
RH
1962 tcg_gen_deposit_tl(cpu_tmp0, cpu_T[0], cpu_T[1], 16, 16);
1963 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1964 tcg_gen_mov_tl(cpu_T[0], cpu_tmp0);
b6abf97d 1965 } else {
f437d0a3 1966 tcg_gen_deposit_tl(cpu_T[1], cpu_T[0], cpu_T[1], 16, 16);
b6abf97d 1967 }
f437d0a3
RH
1968 /* FALLTHRU */
1969#ifdef TARGET_X86_64
1970 case OT_LONG:
1971 /* Concatenate the two 32-bit values and use a 64-bit shift. */
1972 tcg_gen_subi_tl(cpu_tmp0, count, 1);
b6abf97d 1973 if (is_right) {
f437d0a3
RH
1974 tcg_gen_concat_tl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1975 tcg_gen_shr_i64(cpu_tmp0, cpu_T[0], cpu_tmp0);
1976 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], count);
1977 } else {
1978 tcg_gen_concat_tl_i64(cpu_T[0], cpu_T[1], cpu_T[0]);
1979 tcg_gen_shl_i64(cpu_tmp0, cpu_T[0], cpu_tmp0);
1980 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], count);
1981 tcg_gen_shri_i64(cpu_tmp0, cpu_tmp0, 32);
1982 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], 32);
1983 }
1984 break;
1985#endif
1986 default:
1987 tcg_gen_subi_tl(cpu_tmp0, count, 1);
1988 if (is_right) {
1989 tcg_gen_shr_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
b6abf97d 1990
f437d0a3
RH
1991 tcg_gen_subfi_tl(cpu_tmp4, mask + 1, count);
1992 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], count);
1993 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp4);
b6abf97d 1994 } else {
f437d0a3
RH
1995 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1996 if (ot == OT_WORD) {
1997 /* Only needed if count > 16, for Intel behaviour. */
1998 tcg_gen_subfi_tl(cpu_tmp4, 33, count);
1999 tcg_gen_shr_tl(cpu_tmp4, cpu_T[1], cpu_tmp4);
2000 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, cpu_tmp4);
2001 }
2002
2003 tcg_gen_subfi_tl(cpu_tmp4, mask + 1, count);
2004 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], count);
2005 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp4);
b6abf97d 2006 }
f437d0a3
RH
2007 tcg_gen_movi_tl(cpu_tmp4, 0);
2008 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_T[1], count, cpu_tmp4,
2009 cpu_tmp4, cpu_T[1]);
2010 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2011 break;
b6abf97d 2012 }
b6abf97d 2013
b6abf97d 2014 /* store */
1e4840bf 2015 if (op1 == OR_TMP0) {
f437d0a3 2016 gen_op_st_T0_A0(ot + s->mem_index);
b6abf97d 2017 } else {
f437d0a3 2018 gen_op_mov_reg_T0(ot, op1);
b6abf97d 2019 }
1e4840bf 2020
f437d0a3
RH
2021 gen_shift_flags(s, ot, cpu_T[0], cpu_tmp0, count, is_right);
2022 tcg_temp_free(count);
b6abf97d
FB
2023}
2024
2025static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2026{
2027 if (s != OR_TMP1)
2028 gen_op_mov_TN_reg(ot, 1, s);
2029 switch(op) {
2030 case OP_ROL:
2031 gen_rot_rm_T1(s1, ot, d, 0);
2032 break;
2033 case OP_ROR:
2034 gen_rot_rm_T1(s1, ot, d, 1);
2035 break;
2036 case OP_SHL:
2037 case OP_SHL1:
2038 gen_shift_rm_T1(s1, ot, d, 0, 0);
2039 break;
2040 case OP_SHR:
2041 gen_shift_rm_T1(s1, ot, d, 1, 0);
2042 break;
2043 case OP_SAR:
2044 gen_shift_rm_T1(s1, ot, d, 1, 1);
2045 break;
2046 case OP_RCL:
2047 gen_rotc_rm_T1(s1, ot, d, 0);
2048 break;
2049 case OP_RCR:
2050 gen_rotc_rm_T1(s1, ot, d, 1);
2051 break;
2052 }
2c0262af
FB
2053}
2054
2055static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2056{
c1c37968 2057 switch(op) {
8cd6345d 2058 case OP_ROL:
2059 gen_rot_rm_im(s1, ot, d, c, 0);
2060 break;
2061 case OP_ROR:
2062 gen_rot_rm_im(s1, ot, d, c, 1);
2063 break;
c1c37968
FB
2064 case OP_SHL:
2065 case OP_SHL1:
2066 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2067 break;
2068 case OP_SHR:
2069 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2070 break;
2071 case OP_SAR:
2072 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2073 break;
2074 default:
2075 /* currently not optimized */
2076 gen_op_movl_T1_im(c);
2077 gen_shift(s1, op, ot, d, OR_TMP1);
2078 break;
2079 }
2c0262af
FB
2080}
2081
0af10c86
BS
2082static void gen_lea_modrm(CPUX86State *env, DisasContext *s, int modrm,
2083 int *reg_ptr, int *offset_ptr)
2c0262af 2084{
14ce26e7 2085 target_long disp;
2c0262af 2086 int havesib;
14ce26e7 2087 int base;
2c0262af
FB
2088 int index;
2089 int scale;
2090 int opreg;
2091 int mod, rm, code, override, must_add_seg;
2092
2093 override = s->override;
2094 must_add_seg = s->addseg;
2095 if (override >= 0)
2096 must_add_seg = 1;
2097 mod = (modrm >> 6) & 3;
2098 rm = modrm & 7;
2099
2100 if (s->aflag) {
2101
2102 havesib = 0;
2103 base = rm;
2104 index = 0;
2105 scale = 0;
3b46e624 2106
2c0262af
FB
2107 if (base == 4) {
2108 havesib = 1;
0af10c86 2109 code = cpu_ldub_code(env, s->pc++);
2c0262af 2110 scale = (code >> 6) & 3;
14ce26e7
FB
2111 index = ((code >> 3) & 7) | REX_X(s);
2112 base = (code & 7);
2c0262af 2113 }
14ce26e7 2114 base |= REX_B(s);
2c0262af
FB
2115
2116 switch (mod) {
2117 case 0:
14ce26e7 2118 if ((base & 7) == 5) {
2c0262af 2119 base = -1;
0af10c86 2120 disp = (int32_t)cpu_ldl_code(env, s->pc);
2c0262af 2121 s->pc += 4;
14ce26e7
FB
2122 if (CODE64(s) && !havesib) {
2123 disp += s->pc + s->rip_offset;
2124 }
2c0262af
FB
2125 } else {
2126 disp = 0;
2127 }
2128 break;
2129 case 1:
0af10c86 2130 disp = (int8_t)cpu_ldub_code(env, s->pc++);
2c0262af
FB
2131 break;
2132 default:
2133 case 2:
0af10c86 2134 disp = (int32_t)cpu_ldl_code(env, s->pc);
2c0262af
FB
2135 s->pc += 4;
2136 break;
2137 }
3b46e624 2138
2c0262af
FB
2139 if (base >= 0) {
2140 /* for correct popl handling with esp */
2141 if (base == 4 && s->popl_esp_hack)
2142 disp += s->popl_esp_hack;
14ce26e7
FB
2143#ifdef TARGET_X86_64
2144 if (s->aflag == 2) {
57fec1fe 2145 gen_op_movq_A0_reg(base);
14ce26e7 2146 if (disp != 0) {
57fec1fe 2147 gen_op_addq_A0_im(disp);
14ce26e7 2148 }
5fafdf24 2149 } else
14ce26e7
FB
2150#endif
2151 {
57fec1fe 2152 gen_op_movl_A0_reg(base);
14ce26e7
FB
2153 if (disp != 0)
2154 gen_op_addl_A0_im(disp);
2155 }
2c0262af 2156 } else {
14ce26e7
FB
2157#ifdef TARGET_X86_64
2158 if (s->aflag == 2) {
57fec1fe 2159 gen_op_movq_A0_im(disp);
5fafdf24 2160 } else
14ce26e7
FB
2161#endif
2162 {
2163 gen_op_movl_A0_im(disp);
2164 }
2c0262af 2165 }
b16f827b
AJ
2166 /* index == 4 means no index */
2167 if (havesib && (index != 4)) {
14ce26e7
FB
2168#ifdef TARGET_X86_64
2169 if (s->aflag == 2) {
57fec1fe 2170 gen_op_addq_A0_reg_sN(scale, index);
5fafdf24 2171 } else
14ce26e7
FB
2172#endif
2173 {
57fec1fe 2174 gen_op_addl_A0_reg_sN(scale, index);
14ce26e7 2175 }
2c0262af
FB
2176 }
2177 if (must_add_seg) {
2178 if (override < 0) {
2179 if (base == R_EBP || base == R_ESP)
2180 override = R_SS;
2181 else
2182 override = R_DS;
2183 }
14ce26e7
FB
2184#ifdef TARGET_X86_64
2185 if (s->aflag == 2) {
57fec1fe 2186 gen_op_addq_A0_seg(override);
5fafdf24 2187 } else
14ce26e7
FB
2188#endif
2189 {
7162ab21 2190 gen_op_addl_A0_seg(s, override);
14ce26e7 2191 }
2c0262af
FB
2192 }
2193 } else {
2194 switch (mod) {
2195 case 0:
2196 if (rm == 6) {
0af10c86 2197 disp = cpu_lduw_code(env, s->pc);
2c0262af
FB
2198 s->pc += 2;
2199 gen_op_movl_A0_im(disp);
2200 rm = 0; /* avoid SS override */
2201 goto no_rm;
2202 } else {
2203 disp = 0;
2204 }
2205 break;
2206 case 1:
0af10c86 2207 disp = (int8_t)cpu_ldub_code(env, s->pc++);
2c0262af
FB
2208 break;
2209 default:
2210 case 2:
0af10c86 2211 disp = cpu_lduw_code(env, s->pc);
2c0262af
FB
2212 s->pc += 2;
2213 break;
2214 }
2215 switch(rm) {
2216 case 0:
57fec1fe
FB
2217 gen_op_movl_A0_reg(R_EBX);
2218 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af
FB
2219 break;
2220 case 1:
57fec1fe
FB
2221 gen_op_movl_A0_reg(R_EBX);
2222 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af
FB
2223 break;
2224 case 2:
57fec1fe
FB
2225 gen_op_movl_A0_reg(R_EBP);
2226 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af
FB
2227 break;
2228 case 3:
57fec1fe
FB
2229 gen_op_movl_A0_reg(R_EBP);
2230 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af
FB
2231 break;
2232 case 4:
57fec1fe 2233 gen_op_movl_A0_reg(R_ESI);
2c0262af
FB
2234 break;
2235 case 5:
57fec1fe 2236 gen_op_movl_A0_reg(R_EDI);
2c0262af
FB
2237 break;
2238 case 6:
57fec1fe 2239 gen_op_movl_A0_reg(R_EBP);
2c0262af
FB
2240 break;
2241 default:
2242 case 7:
57fec1fe 2243 gen_op_movl_A0_reg(R_EBX);
2c0262af
FB
2244 break;
2245 }
2246 if (disp != 0)
2247 gen_op_addl_A0_im(disp);
2248 gen_op_andl_A0_ffff();
2249 no_rm:
2250 if (must_add_seg) {
2251 if (override < 0) {
2252 if (rm == 2 || rm == 3 || rm == 6)
2253 override = R_SS;
2254 else
2255 override = R_DS;
2256 }
7162ab21 2257 gen_op_addl_A0_seg(s, override);
2c0262af
FB
2258 }
2259 }
2260
2261 opreg = OR_A0;
2262 disp = 0;
2263 *reg_ptr = opreg;
2264 *offset_ptr = disp;
2265}
2266
0af10c86 2267static void gen_nop_modrm(CPUX86State *env, DisasContext *s, int modrm)
e17a36ce
FB
2268{
2269 int mod, rm, base, code;
2270
2271 mod = (modrm >> 6) & 3;
2272 if (mod == 3)
2273 return;
2274 rm = modrm & 7;
2275
2276 if (s->aflag) {
2277
2278 base = rm;
3b46e624 2279
e17a36ce 2280 if (base == 4) {
0af10c86 2281 code = cpu_ldub_code(env, s->pc++);
e17a36ce
FB
2282 base = (code & 7);
2283 }
3b46e624 2284
e17a36ce
FB
2285 switch (mod) {
2286 case 0:
2287 if (base == 5) {
2288 s->pc += 4;
2289 }
2290 break;
2291 case 1:
2292 s->pc++;
2293 break;
2294 default:
2295 case 2:
2296 s->pc += 4;
2297 break;
2298 }
2299 } else {
2300 switch (mod) {
2301 case 0:
2302 if (rm == 6) {
2303 s->pc += 2;
2304 }
2305 break;
2306 case 1:
2307 s->pc++;
2308 break;
2309 default:
2310 case 2:
2311 s->pc += 2;
2312 break;
2313 }
2314 }
2315}
2316
664e0f19
FB
2317/* used for LEA and MOV AX, mem */
2318static void gen_add_A0_ds_seg(DisasContext *s)
2319{
2320 int override, must_add_seg;
2321 must_add_seg = s->addseg;
2322 override = R_DS;
2323 if (s->override >= 0) {
2324 override = s->override;
2325 must_add_seg = 1;
664e0f19
FB
2326 }
2327 if (must_add_seg) {
8f091a59
FB
2328#ifdef TARGET_X86_64
2329 if (CODE64(s)) {
57fec1fe 2330 gen_op_addq_A0_seg(override);
5fafdf24 2331 } else
8f091a59
FB
2332#endif
2333 {
7162ab21 2334 gen_op_addl_A0_seg(s, override);
8f091a59 2335 }
664e0f19
FB
2336 }
2337}
2338
222a3336 2339/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2c0262af 2340 OR_TMP0 */
0af10c86
BS
2341static void gen_ldst_modrm(CPUX86State *env, DisasContext *s, int modrm,
2342 int ot, int reg, int is_store)
2c0262af
FB
2343{
2344 int mod, rm, opreg, disp;
2345
2346 mod = (modrm >> 6) & 3;
14ce26e7 2347 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
2348 if (mod == 3) {
2349 if (is_store) {
2350 if (reg != OR_TMP0)
57fec1fe
FB
2351 gen_op_mov_TN_reg(ot, 0, reg);
2352 gen_op_mov_reg_T0(ot, rm);
2c0262af 2353 } else {
57fec1fe 2354 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af 2355 if (reg != OR_TMP0)
57fec1fe 2356 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
2357 }
2358 } else {
0af10c86 2359 gen_lea_modrm(env, s, modrm, &opreg, &disp);
2c0262af
FB
2360 if (is_store) {
2361 if (reg != OR_TMP0)
57fec1fe
FB
2362 gen_op_mov_TN_reg(ot, 0, reg);
2363 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 2364 } else {
57fec1fe 2365 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 2366 if (reg != OR_TMP0)
57fec1fe 2367 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
2368 }
2369 }
2370}
2371
0af10c86 2372static inline uint32_t insn_get(CPUX86State *env, DisasContext *s, int ot)
2c0262af
FB
2373{
2374 uint32_t ret;
2375
2376 switch(ot) {
2377 case OT_BYTE:
0af10c86 2378 ret = cpu_ldub_code(env, s->pc);
2c0262af
FB
2379 s->pc++;
2380 break;
2381 case OT_WORD:
0af10c86 2382 ret = cpu_lduw_code(env, s->pc);
2c0262af
FB
2383 s->pc += 2;
2384 break;
2385 default:
2386 case OT_LONG:
0af10c86 2387 ret = cpu_ldl_code(env, s->pc);
2c0262af
FB
2388 s->pc += 4;
2389 break;
2390 }
2391 return ret;
2392}
2393
14ce26e7
FB
2394static inline int insn_const_size(unsigned int ot)
2395{
2396 if (ot <= OT_LONG)
2397 return 1 << ot;
2398 else
2399 return 4;
2400}
2401
6e256c93
FB
2402static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2403{
2404 TranslationBlock *tb;
2405 target_ulong pc;
2406
2407 pc = s->cs_base + eip;
2408 tb = s->tb;
2409 /* NOTE: we handle the case where the TB spans two pages here */
2410 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2411 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2412 /* jump to same page: we can use a direct jump */
57fec1fe 2413 tcg_gen_goto_tb(tb_num);
6e256c93 2414 gen_jmp_im(eip);
4b4a72e5 2415 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
6e256c93
FB
2416 } else {
2417 /* jump to another page: currently not optimized */
2418 gen_jmp_im(eip);
2419 gen_eob(s);
2420 }
2421}
2422
5fafdf24 2423static inline void gen_jcc(DisasContext *s, int b,
14ce26e7 2424 target_ulong val, target_ulong next_eip)
2c0262af 2425{
b27fc131 2426 int l1, l2;
3b46e624 2427
2c0262af 2428 if (s->jmp_opt) {
14ce26e7 2429 l1 = gen_new_label();
b27fc131 2430 gen_jcc1(s, b, l1);
dc259201 2431
6e256c93 2432 gen_goto_tb(s, 0, next_eip);
14ce26e7
FB
2433
2434 gen_set_label(l1);
6e256c93 2435 gen_goto_tb(s, 1, val);
5779406a 2436 s->is_jmp = DISAS_TB_JUMP;
2c0262af 2437 } else {
14ce26e7
FB
2438 l1 = gen_new_label();
2439 l2 = gen_new_label();
b27fc131 2440 gen_jcc1(s, b, l1);
8e1c85e3 2441
14ce26e7 2442 gen_jmp_im(next_eip);
8e1c85e3
FB
2443 tcg_gen_br(l2);
2444
14ce26e7
FB
2445 gen_set_label(l1);
2446 gen_jmp_im(val);
2447 gen_set_label(l2);
2c0262af
FB
2448 gen_eob(s);
2449 }
2450}
2451
f32d3781
PB
2452static void gen_cmovcc1(CPUX86State *env, DisasContext *s, int ot, int b,
2453 int modrm, int reg)
2454{
57eb0cc8 2455 CCPrepare cc;
f32d3781 2456
57eb0cc8 2457 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
f32d3781 2458
57eb0cc8
RH
2459 cc = gen_prepare_cc(s, b, cpu_T[1]);
2460 if (cc.mask != -1) {
2461 TCGv t0 = tcg_temp_new();
2462 tcg_gen_andi_tl(t0, cc.reg, cc.mask);
2463 cc.reg = t0;
2464 }
2465 if (!cc.use_reg2) {
2466 cc.reg2 = tcg_const_tl(cc.imm);
f32d3781
PB
2467 }
2468
57eb0cc8
RH
2469 tcg_gen_movcond_tl(cc.cond, cpu_T[0], cc.reg, cc.reg2,
2470 cpu_T[0], cpu_regs[reg]);
2471 gen_op_mov_reg_T0(ot, reg);
2472
2473 if (cc.mask != -1) {
2474 tcg_temp_free(cc.reg);
2475 }
2476 if (!cc.use_reg2) {
2477 tcg_temp_free(cc.reg2);
2478 }
f32d3781
PB
2479}
2480
3bd7da9e
FB
2481static inline void gen_op_movl_T0_seg(int seg_reg)
2482{
2483 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2484 offsetof(CPUX86State,segs[seg_reg].selector));
2485}
2486
2487static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2488{
2489 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2490 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2491 offsetof(CPUX86State,segs[seg_reg].selector));
2492 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2493 tcg_gen_st_tl(cpu_T[0], cpu_env,
2494 offsetof(CPUX86State,segs[seg_reg].base));
2495}
2496
2c0262af
FB
2497/* move T0 to seg_reg and compute if the CPU state may change. Never
2498 call this function with seg_reg == R_CS */
14ce26e7 2499static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2c0262af 2500{
3415a4dd
FB
2501 if (s->pe && !s->vm86) {
2502 /* XXX: optimize by finding processor state dynamically */
773cdfcc 2503 gen_update_cc_op(s);
14ce26e7 2504 gen_jmp_im(cur_eip);
b6abf97d 2505 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 2506 gen_helper_load_seg(cpu_env, tcg_const_i32(seg_reg), cpu_tmp2_i32);
dc196a57
FB
2507 /* abort translation because the addseg value may change or
2508 because ss32 may change. For R_SS, translation must always
2509 stop as a special handling must be done to disable hardware
2510 interrupts for the next instruction */
2511 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
5779406a 2512 s->is_jmp = DISAS_TB_JUMP;
3415a4dd 2513 } else {
3bd7da9e 2514 gen_op_movl_seg_T0_vm(seg_reg);
dc196a57 2515 if (seg_reg == R_SS)
5779406a 2516 s->is_jmp = DISAS_TB_JUMP;
3415a4dd 2517 }
2c0262af
FB
2518}
2519
0573fbfc
TS
2520static inline int svm_is_rep(int prefixes)
2521{
2522 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2523}
2524
872929aa 2525static inline void
0573fbfc 2526gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
b8b6a50b 2527 uint32_t type, uint64_t param)
0573fbfc 2528{
872929aa
FB
2529 /* no SVM activated; fast case */
2530 if (likely(!(s->flags & HF_SVMI_MASK)))
2531 return;
773cdfcc 2532 gen_update_cc_op(s);
872929aa 2533 gen_jmp_im(pc_start - s->cs_base);
052e80d5 2534 gen_helper_svm_check_intercept_param(cpu_env, tcg_const_i32(type),
a7812ae4 2535 tcg_const_i64(param));
0573fbfc
TS
2536}
2537
872929aa 2538static inline void
0573fbfc
TS
2539gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2540{
872929aa 2541 gen_svm_check_intercept_param(s, pc_start, type, 0);
0573fbfc
TS
2542}
2543
4f31916f
FB
2544static inline void gen_stack_update(DisasContext *s, int addend)
2545{
14ce26e7
FB
2546#ifdef TARGET_X86_64
2547 if (CODE64(s)) {
6e0d8677 2548 gen_op_add_reg_im(2, R_ESP, addend);
14ce26e7
FB
2549 } else
2550#endif
4f31916f 2551 if (s->ss32) {
6e0d8677 2552 gen_op_add_reg_im(1, R_ESP, addend);
4f31916f 2553 } else {
6e0d8677 2554 gen_op_add_reg_im(0, R_ESP, addend);
4f31916f
FB
2555 }
2556}
2557
2c0262af
FB
2558/* generate a push. It depends on ss32, addseg and dflag */
2559static void gen_push_T0(DisasContext *s)
2560{
14ce26e7
FB
2561#ifdef TARGET_X86_64
2562 if (CODE64(s)) {
57fec1fe 2563 gen_op_movq_A0_reg(R_ESP);
8f091a59 2564 if (s->dflag) {
57fec1fe
FB
2565 gen_op_addq_A0_im(-8);
2566 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
8f091a59 2567 } else {
57fec1fe
FB
2568 gen_op_addq_A0_im(-2);
2569 gen_op_st_T0_A0(OT_WORD + s->mem_index);
8f091a59 2570 }
57fec1fe 2571 gen_op_mov_reg_A0(2, R_ESP);
5fafdf24 2572 } else
14ce26e7
FB
2573#endif
2574 {
57fec1fe 2575 gen_op_movl_A0_reg(R_ESP);
14ce26e7 2576 if (!s->dflag)
57fec1fe 2577 gen_op_addl_A0_im(-2);
14ce26e7 2578 else
57fec1fe 2579 gen_op_addl_A0_im(-4);
14ce26e7
FB
2580 if (s->ss32) {
2581 if (s->addseg) {
bbf662ee 2582 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
7162ab21 2583 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2584 }
2585 } else {
2586 gen_op_andl_A0_ffff();
bbf662ee 2587 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
7162ab21 2588 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2589 }
57fec1fe 2590 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
14ce26e7 2591 if (s->ss32 && !s->addseg)
57fec1fe 2592 gen_op_mov_reg_A0(1, R_ESP);
14ce26e7 2593 else
57fec1fe 2594 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2c0262af
FB
2595 }
2596}
2597
4f31916f
FB
2598/* generate a push. It depends on ss32, addseg and dflag */
2599/* slower version for T1, only used for call Ev */
2600static void gen_push_T1(DisasContext *s)
2c0262af 2601{
14ce26e7
FB
2602#ifdef TARGET_X86_64
2603 if (CODE64(s)) {
57fec1fe 2604 gen_op_movq_A0_reg(R_ESP);
8f091a59 2605 if (s->dflag) {
57fec1fe
FB
2606 gen_op_addq_A0_im(-8);
2607 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
8f091a59 2608 } else {
57fec1fe
FB
2609 gen_op_addq_A0_im(-2);
2610 gen_op_st_T0_A0(OT_WORD + s->mem_index);
8f091a59 2611 }
57fec1fe 2612 gen_op_mov_reg_A0(2, R_ESP);
5fafdf24 2613 } else
14ce26e7
FB
2614#endif
2615 {
57fec1fe 2616 gen_op_movl_A0_reg(R_ESP);
14ce26e7 2617 if (!s->dflag)
57fec1fe 2618 gen_op_addl_A0_im(-2);
14ce26e7 2619 else
57fec1fe 2620 gen_op_addl_A0_im(-4);
14ce26e7
FB
2621 if (s->ss32) {
2622 if (s->addseg) {
7162ab21 2623 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2624 }
2625 } else {
2626 gen_op_andl_A0_ffff();
7162ab21 2627 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2628 }
57fec1fe 2629 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
3b46e624 2630
14ce26e7 2631 if (s->ss32 && !s->addseg)
57fec1fe 2632 gen_op_mov_reg_A0(1, R_ESP);
14ce26e7
FB
2633 else
2634 gen_stack_update(s, (-2) << s->dflag);
2c0262af
FB
2635 }
2636}
2637
4f31916f
FB
2638/* two step pop is necessary for precise exceptions */
2639static void gen_pop_T0(DisasContext *s)
2c0262af 2640{
14ce26e7
FB
2641#ifdef TARGET_X86_64
2642 if (CODE64(s)) {
57fec1fe
FB
2643 gen_op_movq_A0_reg(R_ESP);
2644 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
5fafdf24 2645 } else
14ce26e7
FB
2646#endif
2647 {
57fec1fe 2648 gen_op_movl_A0_reg(R_ESP);
14ce26e7
FB
2649 if (s->ss32) {
2650 if (s->addseg)
7162ab21 2651 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2652 } else {
2653 gen_op_andl_A0_ffff();
7162ab21 2654 gen_op_addl_A0_seg(s, R_SS);
14ce26e7 2655 }
57fec1fe 2656 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2c0262af
FB
2657 }
2658}
2659
2660static void gen_pop_update(DisasContext *s)
2661{
14ce26e7 2662#ifdef TARGET_X86_64
8f091a59 2663 if (CODE64(s) && s->dflag) {
14ce26e7
FB
2664 gen_stack_update(s, 8);
2665 } else
2666#endif
2667 {
2668 gen_stack_update(s, 2 << s->dflag);
2669 }
2c0262af
FB
2670}
2671
2672static void gen_stack_A0(DisasContext *s)
2673{
57fec1fe 2674 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2675 if (!s->ss32)
2676 gen_op_andl_A0_ffff();
bbf662ee 2677 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2c0262af 2678 if (s->addseg)
7162ab21 2679 gen_op_addl_A0_seg(s, R_SS);
2c0262af
FB
2680}
2681
2682/* NOTE: wrap around in 16 bit not fully handled */
2683static void gen_pusha(DisasContext *s)
2684{
2685 int i;
57fec1fe 2686 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2687 gen_op_addl_A0_im(-16 << s->dflag);
2688 if (!s->ss32)
2689 gen_op_andl_A0_ffff();
bbf662ee 2690 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2c0262af 2691 if (s->addseg)
7162ab21 2692 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2693 for(i = 0;i < 8; i++) {
57fec1fe
FB
2694 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2695 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2c0262af
FB
2696 gen_op_addl_A0_im(2 << s->dflag);
2697 }
57fec1fe 2698 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af
FB
2699}
2700
2701/* NOTE: wrap around in 16 bit not fully handled */
2702static void gen_popa(DisasContext *s)
2703{
2704 int i;
57fec1fe 2705 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2706 if (!s->ss32)
2707 gen_op_andl_A0_ffff();
bbf662ee
FB
2708 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2709 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2c0262af 2710 if (s->addseg)
7162ab21 2711 gen_op_addl_A0_seg(s, R_SS);
2c0262af
FB
2712 for(i = 0;i < 8; i++) {
2713 /* ESP is not reloaded */
2714 if (i != 3) {
57fec1fe
FB
2715 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2716 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2c0262af
FB
2717 }
2718 gen_op_addl_A0_im(2 << s->dflag);
2719 }
57fec1fe 2720 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af
FB
2721}
2722
2c0262af
FB
2723static void gen_enter(DisasContext *s, int esp_addend, int level)
2724{
61a8c4ec 2725 int ot, opsize;
2c0262af 2726
2c0262af 2727 level &= 0x1f;
8f091a59
FB
2728#ifdef TARGET_X86_64
2729 if (CODE64(s)) {
2730 ot = s->dflag ? OT_QUAD : OT_WORD;
2731 opsize = 1 << ot;
3b46e624 2732
57fec1fe 2733 gen_op_movl_A0_reg(R_ESP);
8f091a59 2734 gen_op_addq_A0_im(-opsize);
bbf662ee 2735 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
8f091a59
FB
2736
2737 /* push bp */
57fec1fe
FB
2738 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2739 gen_op_st_T0_A0(ot + s->mem_index);
8f091a59 2740 if (level) {
b5b38f61 2741 /* XXX: must save state */
2999a0b2 2742 gen_helper_enter64_level(cpu_env, tcg_const_i32(level),
a7812ae4
PB
2743 tcg_const_i32((ot == OT_QUAD)),
2744 cpu_T[1]);
8f091a59 2745 }
57fec1fe 2746 gen_op_mov_reg_T1(ot, R_EBP);
bbf662ee 2747 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
57fec1fe 2748 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
5fafdf24 2749 } else
8f091a59
FB
2750#endif
2751 {
2752 ot = s->dflag + OT_WORD;
2753 opsize = 2 << s->dflag;
3b46e624 2754
57fec1fe 2755 gen_op_movl_A0_reg(R_ESP);
8f091a59
FB
2756 gen_op_addl_A0_im(-opsize);
2757 if (!s->ss32)
2758 gen_op_andl_A0_ffff();
bbf662ee 2759 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
8f091a59 2760 if (s->addseg)
7162ab21 2761 gen_op_addl_A0_seg(s, R_SS);
8f091a59 2762 /* push bp */
57fec1fe
FB
2763 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2764 gen_op_st_T0_A0(ot + s->mem_index);
8f091a59 2765 if (level) {
b5b38f61 2766 /* XXX: must save state */
2999a0b2 2767 gen_helper_enter_level(cpu_env, tcg_const_i32(level),
a7812ae4
PB
2768 tcg_const_i32(s->dflag),
2769 cpu_T[1]);
8f091a59 2770 }
57fec1fe 2771 gen_op_mov_reg_T1(ot, R_EBP);
bbf662ee 2772 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
57fec1fe 2773 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af 2774 }
2c0262af
FB
2775}
2776
14ce26e7 2777static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2c0262af 2778{
773cdfcc 2779 gen_update_cc_op(s);
14ce26e7 2780 gen_jmp_im(cur_eip);
77b2bc2c 2781 gen_helper_raise_exception(cpu_env, tcg_const_i32(trapno));
5779406a 2782 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2783}
2784
2785/* an interrupt is different from an exception because of the
7f75ffd3 2786 privilege checks */
5fafdf24 2787static void gen_interrupt(DisasContext *s, int intno,
14ce26e7 2788 target_ulong cur_eip, target_ulong next_eip)
2c0262af 2789{
773cdfcc 2790 gen_update_cc_op(s);
14ce26e7 2791 gen_jmp_im(cur_eip);
77b2bc2c 2792 gen_helper_raise_interrupt(cpu_env, tcg_const_i32(intno),
a7812ae4 2793 tcg_const_i32(next_eip - cur_eip));
5779406a 2794 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2795}
2796
14ce26e7 2797static void gen_debug(DisasContext *s, target_ulong cur_eip)
2c0262af 2798{
773cdfcc 2799 gen_update_cc_op(s);
14ce26e7 2800 gen_jmp_im(cur_eip);
4a7443be 2801 gen_helper_debug(cpu_env);
5779406a 2802 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2803}
2804
2805/* generate a generic end of block. Trace exception is also generated
2806 if needed */
2807static void gen_eob(DisasContext *s)
2808{
773cdfcc 2809 gen_update_cc_op(s);
a2cc3b24 2810 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
f0967a1a 2811 gen_helper_reset_inhibit_irq(cpu_env);
a2cc3b24 2812 }
a2397807 2813 if (s->tb->flags & HF_RF_MASK) {
f0967a1a 2814 gen_helper_reset_rf(cpu_env);
a2397807 2815 }
34865134 2816 if (s->singlestep_enabled) {
4a7443be 2817 gen_helper_debug(cpu_env);
34865134 2818 } else if (s->tf) {
4a7443be 2819 gen_helper_single_step(cpu_env);
2c0262af 2820 } else {
57fec1fe 2821 tcg_gen_exit_tb(0);
2c0262af 2822 }
5779406a 2823 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2824}
2825
2826/* generate a jump to eip. No segment change must happen before as a
2827 direct call to the next block may occur */
14ce26e7 2828static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2c0262af 2829{
a3251186
RH
2830 gen_update_cc_op(s);
2831 set_cc_op(s, CC_OP_DYNAMIC);
2c0262af 2832 if (s->jmp_opt) {
6e256c93 2833 gen_goto_tb(s, tb_num, eip);
5779406a 2834 s->is_jmp = DISAS_TB_JUMP;
2c0262af 2835 } else {
14ce26e7 2836 gen_jmp_im(eip);
2c0262af
FB
2837 gen_eob(s);
2838 }
2839}
2840
14ce26e7
FB
2841static void gen_jmp(DisasContext *s, target_ulong eip)
2842{
2843 gen_jmp_tb(s, eip, 0);
2844}
2845
8686c490
FB
2846static inline void gen_ldq_env_A0(int idx, int offset)
2847{
2848 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2849 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2850 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
8686c490 2851}
664e0f19 2852
8686c490
FB
2853static inline void gen_stq_env_A0(int idx, int offset)
2854{
2855 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2856 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2857 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
8686c490 2858}
664e0f19 2859
8686c490
FB
2860static inline void gen_ldo_env_A0(int idx, int offset)
2861{
2862 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2863 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2864 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
8686c490 2865 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
b6abf97d
FB
2866 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2867 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
8686c490 2868}
14ce26e7 2869
8686c490
FB
2870static inline void gen_sto_env_A0(int idx, int offset)
2871{
2872 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2873 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2874 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
8686c490 2875 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
b6abf97d
FB
2876 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2877 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
8686c490 2878}
14ce26e7 2879
5af45186
FB
2880static inline void gen_op_movo(int d_offset, int s_offset)
2881{
b6abf97d
FB
2882 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2883 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2884 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2885 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
5af45186
FB
2886}
2887
2888static inline void gen_op_movq(int d_offset, int s_offset)
2889{
b6abf97d
FB
2890 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2891 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
5af45186
FB
2892}
2893
2894static inline void gen_op_movl(int d_offset, int s_offset)
2895{
b6abf97d
FB
2896 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2897 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
5af45186
FB
2898}
2899
2900static inline void gen_op_movq_env_0(int d_offset)
2901{
b6abf97d
FB
2902 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2903 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
5af45186 2904}
664e0f19 2905
d3eb5eae
BS
2906typedef void (*SSEFunc_i_ep)(TCGv_i32 val, TCGv_ptr env, TCGv_ptr reg);
2907typedef void (*SSEFunc_l_ep)(TCGv_i64 val, TCGv_ptr env, TCGv_ptr reg);
2908typedef void (*SSEFunc_0_epi)(TCGv_ptr env, TCGv_ptr reg, TCGv_i32 val);
2909typedef void (*SSEFunc_0_epl)(TCGv_ptr env, TCGv_ptr reg, TCGv_i64 val);
2910typedef void (*SSEFunc_0_epp)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b);
2911typedef void (*SSEFunc_0_eppi)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b,
2912 TCGv_i32 val);
c4baa050 2913typedef void (*SSEFunc_0_ppi)(TCGv_ptr reg_a, TCGv_ptr reg_b, TCGv_i32 val);
d3eb5eae
BS
2914typedef void (*SSEFunc_0_eppt)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b,
2915 TCGv val);
c4baa050 2916
5af45186
FB
2917#define SSE_SPECIAL ((void *)1)
2918#define SSE_DUMMY ((void *)2)
664e0f19 2919
a7812ae4
PB
2920#define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2921#define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2922 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
5af45186 2923
d3eb5eae 2924static const SSEFunc_0_epp sse_op_table1[256][4] = {
a35f3ec7
AJ
2925 /* 3DNow! extensions */
2926 [0x0e] = { SSE_DUMMY }, /* femms */
2927 [0x0f] = { SSE_DUMMY }, /* pf... */
664e0f19
FB
2928 /* pure SSE operations */
2929 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2930 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
465e9838 2931 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
664e0f19 2932 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
a7812ae4
PB
2933 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2934 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
664e0f19
FB
2935 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2936 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2937
2938 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2939 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2940 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
d9f4bb27 2941 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd, movntss, movntsd */
664e0f19
FB
2942 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2943 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
a7812ae4
PB
2944 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2945 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
664e0f19
FB
2946 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2947 [0x51] = SSE_FOP(sqrt),
a7812ae4
PB
2948 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2949 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2950 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2951 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2952 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2953 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
664e0f19
FB
2954 [0x58] = SSE_FOP(add),
2955 [0x59] = SSE_FOP(mul),
a7812ae4
PB
2956 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2957 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2958 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
664e0f19
FB
2959 [0x5c] = SSE_FOP(sub),
2960 [0x5d] = SSE_FOP(min),
2961 [0x5e] = SSE_FOP(div),
2962 [0x5f] = SSE_FOP(max),
2963
2964 [0xc2] = SSE_FOP(cmpeq),
d3eb5eae
BS
2965 [0xc6] = { (SSEFunc_0_epp)gen_helper_shufps,
2966 (SSEFunc_0_epp)gen_helper_shufpd }, /* XXX: casts */
664e0f19 2967
7073fbad
RH
2968 /* SSSE3, SSE4, MOVBE, CRC32, BMI1, BMI2, ADX. */
2969 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2970 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
4242b1bd 2971
664e0f19
FB
2972 /* MMX ops and their SSE extensions */
2973 [0x60] = MMX_OP2(punpcklbw),
2974 [0x61] = MMX_OP2(punpcklwd),
2975 [0x62] = MMX_OP2(punpckldq),
2976 [0x63] = MMX_OP2(packsswb),
2977 [0x64] = MMX_OP2(pcmpgtb),
2978 [0x65] = MMX_OP2(pcmpgtw),
2979 [0x66] = MMX_OP2(pcmpgtl),
2980 [0x67] = MMX_OP2(packuswb),
2981 [0x68] = MMX_OP2(punpckhbw),
2982 [0x69] = MMX_OP2(punpckhwd),
2983 [0x6a] = MMX_OP2(punpckhdq),
2984 [0x6b] = MMX_OP2(packssdw),
a7812ae4
PB
2985 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2986 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
664e0f19
FB
2987 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2988 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
d3eb5eae
BS
2989 [0x70] = { (SSEFunc_0_epp)gen_helper_pshufw_mmx,
2990 (SSEFunc_0_epp)gen_helper_pshufd_xmm,
2991 (SSEFunc_0_epp)gen_helper_pshufhw_xmm,
2992 (SSEFunc_0_epp)gen_helper_pshuflw_xmm }, /* XXX: casts */
664e0f19
FB
2993 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2994 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2995 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2996 [0x74] = MMX_OP2(pcmpeqb),
2997 [0x75] = MMX_OP2(pcmpeqw),
2998 [0x76] = MMX_OP2(pcmpeql),
a35f3ec7 2999 [0x77] = { SSE_DUMMY }, /* emms */
d9f4bb27
AP
3000 [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
3001 [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
a7812ae4
PB
3002 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
3003 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
664e0f19
FB
3004 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
3005 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3006 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3007 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
a7812ae4 3008 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
664e0f19
FB
3009 [0xd1] = MMX_OP2(psrlw),
3010 [0xd2] = MMX_OP2(psrld),
3011 [0xd3] = MMX_OP2(psrlq),
3012 [0xd4] = MMX_OP2(paddq),
3013 [0xd5] = MMX_OP2(pmullw),
3014 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3015 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3016 [0xd8] = MMX_OP2(psubusb),
3017 [0xd9] = MMX_OP2(psubusw),
3018 [0xda] = MMX_OP2(pminub),
3019 [0xdb] = MMX_OP2(pand),
3020 [0xdc] = MMX_OP2(paddusb),
3021 [0xdd] = MMX_OP2(paddusw),
3022 [0xde] = MMX_OP2(pmaxub),
3023 [0xdf] = MMX_OP2(pandn),
3024 [0xe0] = MMX_OP2(pavgb),
3025 [0xe1] = MMX_OP2(psraw),
3026 [0xe2] = MMX_OP2(psrad),
3027 [0xe3] = MMX_OP2(pavgw),
3028 [0xe4] = MMX_OP2(pmulhuw),
3029 [0xe5] = MMX_OP2(pmulhw),
a7812ae4 3030 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
664e0f19
FB
3031 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntq */
3032 [0xe8] = MMX_OP2(psubsb),
3033 [0xe9] = MMX_OP2(psubsw),
3034 [0xea] = MMX_OP2(pminsw),
3035 [0xeb] = MMX_OP2(por),
3036 [0xec] = MMX_OP2(paddsb),
3037 [0xed] = MMX_OP2(paddsw),
3038 [0xee] = MMX_OP2(pmaxsw),
3039 [0xef] = MMX_OP2(pxor),
465e9838 3040 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
664e0f19
FB
3041 [0xf1] = MMX_OP2(psllw),
3042 [0xf2] = MMX_OP2(pslld),
3043 [0xf3] = MMX_OP2(psllq),
3044 [0xf4] = MMX_OP2(pmuludq),
3045 [0xf5] = MMX_OP2(pmaddwd),
3046 [0xf6] = MMX_OP2(psadbw),
d3eb5eae
BS
3047 [0xf7] = { (SSEFunc_0_epp)gen_helper_maskmov_mmx,
3048 (SSEFunc_0_epp)gen_helper_maskmov_xmm }, /* XXX: casts */
664e0f19
FB
3049 [0xf8] = MMX_OP2(psubb),
3050 [0xf9] = MMX_OP2(psubw),
3051 [0xfa] = MMX_OP2(psubl),
3052 [0xfb] = MMX_OP2(psubq),
3053 [0xfc] = MMX_OP2(paddb),
3054 [0xfd] = MMX_OP2(paddw),
3055 [0xfe] = MMX_OP2(paddl),
3056};
3057
d3eb5eae 3058static const SSEFunc_0_epp sse_op_table2[3 * 8][2] = {
664e0f19
FB
3059 [0 + 2] = MMX_OP2(psrlw),
3060 [0 + 4] = MMX_OP2(psraw),
3061 [0 + 6] = MMX_OP2(psllw),
3062 [8 + 2] = MMX_OP2(psrld),
3063 [8 + 4] = MMX_OP2(psrad),
3064 [8 + 6] = MMX_OP2(pslld),
3065 [16 + 2] = MMX_OP2(psrlq),
a7812ae4 3066 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
664e0f19 3067 [16 + 6] = MMX_OP2(psllq),
a7812ae4 3068 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
664e0f19
FB
3069};
3070
d3eb5eae 3071static const SSEFunc_0_epi sse_op_table3ai[] = {
a7812ae4 3072 gen_helper_cvtsi2ss,
11f8cdbc 3073 gen_helper_cvtsi2sd
c4baa050 3074};
a7812ae4 3075
11f8cdbc 3076#ifdef TARGET_X86_64
d3eb5eae 3077static const SSEFunc_0_epl sse_op_table3aq[] = {
11f8cdbc
SW
3078 gen_helper_cvtsq2ss,
3079 gen_helper_cvtsq2sd
3080};
3081#endif
3082
d3eb5eae 3083static const SSEFunc_i_ep sse_op_table3bi[] = {
a7812ae4 3084 gen_helper_cvttss2si,
a7812ae4 3085 gen_helper_cvtss2si,
bedc2ac1 3086 gen_helper_cvttsd2si,
11f8cdbc 3087 gen_helper_cvtsd2si
664e0f19 3088};
3b46e624 3089
11f8cdbc 3090#ifdef TARGET_X86_64
d3eb5eae 3091static const SSEFunc_l_ep sse_op_table3bq[] = {
11f8cdbc 3092 gen_helper_cvttss2sq,
11f8cdbc 3093 gen_helper_cvtss2sq,
bedc2ac1 3094 gen_helper_cvttsd2sq,
11f8cdbc
SW
3095 gen_helper_cvtsd2sq
3096};
3097#endif
3098
d3eb5eae 3099static const SSEFunc_0_epp sse_op_table4[8][4] = {
664e0f19
FB
3100 SSE_FOP(cmpeq),
3101 SSE_FOP(cmplt),
3102 SSE_FOP(cmple),
3103 SSE_FOP(cmpunord),
3104 SSE_FOP(cmpneq),
3105 SSE_FOP(cmpnlt),
3106 SSE_FOP(cmpnle),
3107 SSE_FOP(cmpord),
3108};
3b46e624 3109
d3eb5eae 3110static const SSEFunc_0_epp sse_op_table5[256] = {
a7812ae4
PB
3111 [0x0c] = gen_helper_pi2fw,
3112 [0x0d] = gen_helper_pi2fd,
3113 [0x1c] = gen_helper_pf2iw,
3114 [0x1d] = gen_helper_pf2id,
3115 [0x8a] = gen_helper_pfnacc,
3116 [0x8e] = gen_helper_pfpnacc,
3117 [0x90] = gen_helper_pfcmpge,
3118 [0x94] = gen_helper_pfmin,
3119 [0x96] = gen_helper_pfrcp,
3120 [0x97] = gen_helper_pfrsqrt,
3121 [0x9a] = gen_helper_pfsub,
3122 [0x9e] = gen_helper_pfadd,
3123 [0xa0] = gen_helper_pfcmpgt,
3124 [0xa4] = gen_helper_pfmax,
3125 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
3126 [0xa7] = gen_helper_movq, /* pfrsqit1 */
3127 [0xaa] = gen_helper_pfsubr,
3128 [0xae] = gen_helper_pfacc,
3129 [0xb0] = gen_helper_pfcmpeq,
3130 [0xb4] = gen_helper_pfmul,
3131 [0xb6] = gen_helper_movq, /* pfrcpit2 */
3132 [0xb7] = gen_helper_pmulhrw_mmx,
3133 [0xbb] = gen_helper_pswapd,
3134 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
a35f3ec7
AJ
3135};
3136
d3eb5eae
BS
3137struct SSEOpHelper_epp {
3138 SSEFunc_0_epp op[2];
c4baa050
BS
3139 uint32_t ext_mask;
3140};
3141
d3eb5eae
BS
3142struct SSEOpHelper_eppi {
3143 SSEFunc_0_eppi op[2];
c4baa050 3144 uint32_t ext_mask;
222a3336 3145};
c4baa050 3146
222a3336 3147#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
a7812ae4
PB
3148#define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3149#define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
222a3336 3150#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
e71827bc
AJ
3151#define PCLMULQDQ_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, \
3152 CPUID_EXT_PCLMULQDQ }
d640045a 3153#define AESNI_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_AES }
c4baa050 3154
d3eb5eae 3155static const struct SSEOpHelper_epp sse_op_table6[256] = {
222a3336
AZ
3156 [0x00] = SSSE3_OP(pshufb),
3157 [0x01] = SSSE3_OP(phaddw),
3158 [0x02] = SSSE3_OP(phaddd),
3159 [0x03] = SSSE3_OP(phaddsw),
3160 [0x04] = SSSE3_OP(pmaddubsw),
3161 [0x05] = SSSE3_OP(phsubw),
3162 [0x06] = SSSE3_OP(phsubd),
3163 [0x07] = SSSE3_OP(phsubsw),
3164 [0x08] = SSSE3_OP(psignb),
3165 [0x09] = SSSE3_OP(psignw),
3166 [0x0a] = SSSE3_OP(psignd),
3167 [0x0b] = SSSE3_OP(pmulhrsw),
3168 [0x10] = SSE41_OP(pblendvb),
3169 [0x14] = SSE41_OP(blendvps),
3170 [0x15] = SSE41_OP(blendvpd),
3171 [0x17] = SSE41_OP(ptest),
3172 [0x1c] = SSSE3_OP(pabsb),
3173 [0x1d] = SSSE3_OP(pabsw),
3174 [0x1e] = SSSE3_OP(pabsd),
3175 [0x20] = SSE41_OP(pmovsxbw),
3176 [0x21] = SSE41_OP(pmovsxbd),
3177 [0x22] = SSE41_OP(pmovsxbq),
3178 [0x23] = SSE41_OP(pmovsxwd),
3179 [0x24] = SSE41_OP(pmovsxwq),
3180 [0x25] = SSE41_OP(pmovsxdq),
3181 [0x28] = SSE41_OP(pmuldq),
3182 [0x29] = SSE41_OP(pcmpeqq),
3183 [0x2a] = SSE41_SPECIAL, /* movntqda */
3184 [0x2b] = SSE41_OP(packusdw),
3185 [0x30] = SSE41_OP(pmovzxbw),
3186 [0x31] = SSE41_OP(pmovzxbd),
3187 [0x32] = SSE41_OP(pmovzxbq),
3188 [0x33] = SSE41_OP(pmovzxwd),
3189 [0x34] = SSE41_OP(pmovzxwq),
3190 [0x35] = SSE41_OP(pmovzxdq),
3191 [0x37] = SSE42_OP(pcmpgtq),
3192 [0x38] = SSE41_OP(pminsb),
3193 [0x39] = SSE41_OP(pminsd),
3194 [0x3a] = SSE41_OP(pminuw),
3195 [0x3b] = SSE41_OP(pminud),
3196 [0x3c] = SSE41_OP(pmaxsb),
3197 [0x3d] = SSE41_OP(pmaxsd),
3198 [0x3e] = SSE41_OP(pmaxuw),
3199 [0x3f] = SSE41_OP(pmaxud),
3200 [0x40] = SSE41_OP(pmulld),
3201 [0x41] = SSE41_OP(phminposuw),
d640045a
AJ
3202 [0xdb] = AESNI_OP(aesimc),
3203 [0xdc] = AESNI_OP(aesenc),
3204 [0xdd] = AESNI_OP(aesenclast),
3205 [0xde] = AESNI_OP(aesdec),
3206 [0xdf] = AESNI_OP(aesdeclast),
4242b1bd
AZ
3207};
3208
d3eb5eae 3209static const struct SSEOpHelper_eppi sse_op_table7[256] = {
222a3336
AZ
3210 [0x08] = SSE41_OP(roundps),
3211 [0x09] = SSE41_OP(roundpd),
3212 [0x0a] = SSE41_OP(roundss),
3213 [0x0b] = SSE41_OP(roundsd),
3214 [0x0c] = SSE41_OP(blendps),
3215 [0x0d] = SSE41_OP(blendpd),
3216 [0x0e] = SSE41_OP(pblendw),
3217 [0x0f] = SSSE3_OP(palignr),
3218 [0x14] = SSE41_SPECIAL, /* pextrb */
3219 [0x15] = SSE41_SPECIAL, /* pextrw */
3220 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3221 [0x17] = SSE41_SPECIAL, /* extractps */
3222 [0x20] = SSE41_SPECIAL, /* pinsrb */
3223 [0x21] = SSE41_SPECIAL, /* insertps */
3224 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3225 [0x40] = SSE41_OP(dpps),
3226 [0x41] = SSE41_OP(dppd),
3227 [0x42] = SSE41_OP(mpsadbw),
e71827bc 3228 [0x44] = PCLMULQDQ_OP(pclmulqdq),
222a3336
AZ
3229 [0x60] = SSE42_OP(pcmpestrm),
3230 [0x61] = SSE42_OP(pcmpestri),
3231 [0x62] = SSE42_OP(pcmpistrm),
3232 [0x63] = SSE42_OP(pcmpistri),
d640045a 3233 [0xdf] = AESNI_OP(aeskeygenassist),
4242b1bd
AZ
3234};
3235
0af10c86
BS
3236static void gen_sse(CPUX86State *env, DisasContext *s, int b,
3237 target_ulong pc_start, int rex_r)
664e0f19
FB
3238{
3239 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3240 int modrm, mod, rm, reg, reg_addr, offset_addr;
d3eb5eae
BS
3241 SSEFunc_0_epp sse_fn_epp;
3242 SSEFunc_0_eppi sse_fn_eppi;
c4baa050 3243 SSEFunc_0_ppi sse_fn_ppi;
d3eb5eae 3244 SSEFunc_0_eppt sse_fn_eppt;
664e0f19
FB
3245
3246 b &= 0xff;
5fafdf24 3247 if (s->prefix & PREFIX_DATA)
664e0f19 3248 b1 = 1;
5fafdf24 3249 else if (s->prefix & PREFIX_REPZ)
664e0f19 3250 b1 = 2;
5fafdf24 3251 else if (s->prefix & PREFIX_REPNZ)
664e0f19
FB
3252 b1 = 3;
3253 else
3254 b1 = 0;
d3eb5eae
BS
3255 sse_fn_epp = sse_op_table1[b][b1];
3256 if (!sse_fn_epp) {
664e0f19 3257 goto illegal_op;
c4baa050 3258 }
a35f3ec7 3259 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
664e0f19
FB
3260 is_xmm = 1;
3261 } else {
3262 if (b1 == 0) {
3263 /* MMX case */
3264 is_xmm = 0;
3265 } else {
3266 is_xmm = 1;
3267 }
3268 }
3269 /* simple MMX/SSE operation */
3270 if (s->flags & HF_TS_MASK) {
3271 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3272 return;
3273 }
3274 if (s->flags & HF_EM_MASK) {
3275 illegal_op:
3276 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3277 return;
3278 }
3279 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
4242b1bd
AZ
3280 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3281 goto illegal_op;
e771edab
AJ
3282 if (b == 0x0e) {
3283 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3284 goto illegal_op;
3285 /* femms */
d3eb5eae 3286 gen_helper_emms(cpu_env);
e771edab
AJ
3287 return;
3288 }
3289 if (b == 0x77) {
3290 /* emms */
d3eb5eae 3291 gen_helper_emms(cpu_env);
664e0f19
FB
3292 return;
3293 }
3294 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3295 the static cpu state) */
3296 if (!is_xmm) {
d3eb5eae 3297 gen_helper_enter_mmx(cpu_env);
664e0f19
FB
3298 }
3299
0af10c86 3300 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3301 reg = ((modrm >> 3) & 7);
3302 if (is_xmm)
3303 reg |= rex_r;
3304 mod = (modrm >> 6) & 3;
d3eb5eae 3305 if (sse_fn_epp == SSE_SPECIAL) {
664e0f19
FB
3306 b |= (b1 << 8);
3307 switch(b) {
3308 case 0x0e7: /* movntq */
5fafdf24 3309 if (mod == 3)
664e0f19 3310 goto illegal_op;
0af10c86 3311 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3312 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3313 break;
3314 case 0x1e7: /* movntdq */
3315 case 0x02b: /* movntps */
3316 case 0x12b: /* movntps */
2e21e749
T
3317 if (mod == 3)
3318 goto illegal_op;
0af10c86 3319 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2e21e749
T
3320 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3321 break;
465e9838
FB
3322 case 0x3f0: /* lddqu */
3323 if (mod == 3)
664e0f19 3324 goto illegal_op;
0af10c86 3325 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
c2254920 3326 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19 3327 break;
d9f4bb27
AP
3328 case 0x22b: /* movntss */
3329 case 0x32b: /* movntsd */
3330 if (mod == 3)
3331 goto illegal_op;
0af10c86 3332 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
d9f4bb27
AP
3333 if (b1 & 1) {
3334 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,
3335 xmm_regs[reg]));
3336 } else {
3337 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3338 xmm_regs[reg].XMM_L(0)));
3339 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3340 }
3341 break;
664e0f19 3342 case 0x6e: /* movd mm, ea */
dabd98dd
FB
3343#ifdef TARGET_X86_64
3344 if (s->dflag == 2) {
0af10c86 3345 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 0);
5af45186 3346 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
5fafdf24 3347 } else
dabd98dd
FB
3348#endif
3349 {
0af10c86 3350 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 0);
5af45186
FB
3351 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3352 offsetof(CPUX86State,fpregs[reg].mmx));
a7812ae4
PB
3353 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3354 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
dabd98dd 3355 }
664e0f19
FB
3356 break;
3357 case 0x16e: /* movd xmm, ea */
dabd98dd
FB
3358#ifdef TARGET_X86_64
3359 if (s->dflag == 2) {
0af10c86 3360 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 0);
5af45186
FB
3361 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3362 offsetof(CPUX86State,xmm_regs[reg]));
a7812ae4 3363 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
5fafdf24 3364 } else
dabd98dd
FB
3365#endif
3366 {
0af10c86 3367 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 0);
5af45186
FB
3368 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3369 offsetof(CPUX86State,xmm_regs[reg]));
b6abf97d 3370 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 3371 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
dabd98dd 3372 }
664e0f19
FB
3373 break;
3374 case 0x6f: /* movq mm, ea */
3375 if (mod != 3) {
0af10c86 3376 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3377 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3378 } else {
3379 rm = (modrm & 7);
b6abf97d 3380 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
5af45186 3381 offsetof(CPUX86State,fpregs[rm].mmx));
b6abf97d 3382 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
5af45186 3383 offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3384 }
3385 break;
3386 case 0x010: /* movups */
3387 case 0x110: /* movupd */
3388 case 0x028: /* movaps */
3389 case 0x128: /* movapd */
3390 case 0x16f: /* movdqa xmm, ea */
3391 case 0x26f: /* movdqu xmm, ea */
3392 if (mod != 3) {
0af10c86 3393 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3394 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3395 } else {
3396 rm = (modrm & 7) | REX_B(s);
3397 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3398 offsetof(CPUX86State,xmm_regs[rm]));
3399 }
3400 break;
3401 case 0x210: /* movss xmm, ea */
3402 if (mod != 3) {
0af10c86 3403 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 3404 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 3405 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
664e0f19 3406 gen_op_movl_T0_0();
651ba608
FB
3407 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3408 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3409 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
664e0f19
FB
3410 } else {
3411 rm = (modrm & 7) | REX_B(s);
3412 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3413 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3414 }
3415 break;
3416 case 0x310: /* movsd xmm, ea */
3417 if (mod != 3) {
0af10c86 3418 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3419 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19 3420 gen_op_movl_T0_0();
651ba608
FB
3421 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3422 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
664e0f19
FB
3423 } else {
3424 rm = (modrm & 7) | REX_B(s);
3425 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3426 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3427 }
3428 break;
3429 case 0x012: /* movlps */
3430 case 0x112: /* movlpd */
3431 if (mod != 3) {
0af10c86 3432 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3433 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3434 } else {
3435 /* movhlps */
3436 rm = (modrm & 7) | REX_B(s);
3437 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3438 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3439 }
3440 break;
465e9838
FB
3441 case 0x212: /* movsldup */
3442 if (mod != 3) {
0af10c86 3443 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3444 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
465e9838
FB
3445 } else {
3446 rm = (modrm & 7) | REX_B(s);
3447 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3448 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3449 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3450 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3451 }
3452 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3453 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3454 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3455 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3456 break;
3457 case 0x312: /* movddup */
3458 if (mod != 3) {
0af10c86 3459 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3460 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
465e9838
FB
3461 } else {
3462 rm = (modrm & 7) | REX_B(s);
3463 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3464 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3465 }
3466 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
ba6526df 3467 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
465e9838 3468 break;
664e0f19
FB
3469 case 0x016: /* movhps */
3470 case 0x116: /* movhpd */
3471 if (mod != 3) {
0af10c86 3472 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3473 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3474 } else {
3475 /* movlhps */
3476 rm = (modrm & 7) | REX_B(s);
3477 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3478 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3479 }
3480 break;
3481 case 0x216: /* movshdup */
3482 if (mod != 3) {
0af10c86 3483 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3484 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3485 } else {
3486 rm = (modrm & 7) | REX_B(s);
3487 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3488 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3489 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3490 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3491 }
3492 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3493 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3494 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3495 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3496 break;
d9f4bb27
AP
3497 case 0x178:
3498 case 0x378:
3499 {
3500 int bit_index, field_length;
3501
3502 if (b1 == 1 && reg != 0)
3503 goto illegal_op;
0af10c86
BS
3504 field_length = cpu_ldub_code(env, s->pc++) & 0x3F;
3505 bit_index = cpu_ldub_code(env, s->pc++) & 0x3F;
d9f4bb27
AP
3506 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3507 offsetof(CPUX86State,xmm_regs[reg]));
3508 if (b1 == 1)
d3eb5eae
BS
3509 gen_helper_extrq_i(cpu_env, cpu_ptr0,
3510 tcg_const_i32(bit_index),
3511 tcg_const_i32(field_length));
d9f4bb27 3512 else
d3eb5eae
BS
3513 gen_helper_insertq_i(cpu_env, cpu_ptr0,
3514 tcg_const_i32(bit_index),
3515 tcg_const_i32(field_length));
d9f4bb27
AP
3516 }
3517 break;
664e0f19 3518 case 0x7e: /* movd ea, mm */
dabd98dd
FB
3519#ifdef TARGET_X86_64
3520 if (s->dflag == 2) {
5af45186
FB
3521 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3522 offsetof(CPUX86State,fpregs[reg].mmx));
0af10c86 3523 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 1);
5fafdf24 3524 } else
dabd98dd
FB
3525#endif
3526 {
5af45186
FB
3527 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3528 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
0af10c86 3529 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 1);
dabd98dd 3530 }
664e0f19
FB
3531 break;
3532 case 0x17e: /* movd ea, xmm */
dabd98dd
FB
3533#ifdef TARGET_X86_64
3534 if (s->dflag == 2) {
5af45186
FB
3535 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3536 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
0af10c86 3537 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 1);
5fafdf24 3538 } else
dabd98dd
FB
3539#endif
3540 {
5af45186
FB
3541 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3542 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
0af10c86 3543 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 1);
dabd98dd 3544 }
664e0f19
FB
3545 break;
3546 case 0x27e: /* movq xmm, ea */
3547 if (mod != 3) {
0af10c86 3548 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3549 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3550 } else {
3551 rm = (modrm & 7) | REX_B(s);
3552 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3553 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3554 }
3555 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3556 break;
3557 case 0x7f: /* movq ea, mm */
3558 if (mod != 3) {
0af10c86 3559 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3560 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3561 } else {
3562 rm = (modrm & 7);
3563 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3564 offsetof(CPUX86State,fpregs[reg].mmx));
3565 }
3566 break;
3567 case 0x011: /* movups */
3568 case 0x111: /* movupd */
3569 case 0x029: /* movaps */
3570 case 0x129: /* movapd */
3571 case 0x17f: /* movdqa ea, xmm */
3572 case 0x27f: /* movdqu ea, xmm */
3573 if (mod != 3) {
0af10c86 3574 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3575 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3576 } else {
3577 rm = (modrm & 7) | REX_B(s);
3578 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3579 offsetof(CPUX86State,xmm_regs[reg]));
3580 }
3581 break;
3582 case 0x211: /* movss ea, xmm */
3583 if (mod != 3) {
0af10c86 3584 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 3585 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
57fec1fe 3586 gen_op_st_T0_A0(OT_LONG + s->mem_index);
664e0f19
FB
3587 } else {
3588 rm = (modrm & 7) | REX_B(s);
3589 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3590 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3591 }
3592 break;
3593 case 0x311: /* movsd ea, xmm */
3594 if (mod != 3) {
0af10c86 3595 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3596 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3597 } else {
3598 rm = (modrm & 7) | REX_B(s);
3599 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3600 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3601 }
3602 break;
3603 case 0x013: /* movlps */
3604 case 0x113: /* movlpd */
3605 if (mod != 3) {
0af10c86 3606 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3607 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3608 } else {
3609 goto illegal_op;
3610 }
3611 break;
3612 case 0x017: /* movhps */
3613 case 0x117: /* movhpd */
3614 if (mod != 3) {
0af10c86 3615 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3616 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3617 } else {
3618 goto illegal_op;
3619 }
3620 break;
3621 case 0x71: /* shift mm, im */
3622 case 0x72:
3623 case 0x73:
3624 case 0x171: /* shift xmm, im */
3625 case 0x172:
3626 case 0x173:
c045af25
AK
3627 if (b1 >= 2) {
3628 goto illegal_op;
3629 }
0af10c86 3630 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3631 if (is_xmm) {
3632 gen_op_movl_T0_im(val);
651ba608 3633 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
664e0f19 3634 gen_op_movl_T0_0();
651ba608 3635 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
664e0f19
FB
3636 op1_offset = offsetof(CPUX86State,xmm_t0);
3637 } else {
3638 gen_op_movl_T0_im(val);
651ba608 3639 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
664e0f19 3640 gen_op_movl_T0_0();
651ba608 3641 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
664e0f19
FB
3642 op1_offset = offsetof(CPUX86State,mmx_t0);
3643 }
d3eb5eae
BS
3644 sse_fn_epp = sse_op_table2[((b - 1) & 3) * 8 +
3645 (((modrm >> 3)) & 7)][b1];
3646 if (!sse_fn_epp) {
664e0f19 3647 goto illegal_op;
c4baa050 3648 }
664e0f19
FB
3649 if (is_xmm) {
3650 rm = (modrm & 7) | REX_B(s);
3651 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3652 } else {
3653 rm = (modrm & 7);
3654 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3655 }
5af45186
FB
3656 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3657 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
d3eb5eae 3658 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3659 break;
3660 case 0x050: /* movmskps */
664e0f19 3661 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3662 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3663 offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3664 gen_helper_movmskps(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3665 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
57fec1fe 3666 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19
FB
3667 break;
3668 case 0x150: /* movmskpd */
664e0f19 3669 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3670 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3671 offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3672 gen_helper_movmskpd(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3673 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
57fec1fe 3674 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19
FB
3675 break;
3676 case 0x02a: /* cvtpi2ps */
3677 case 0x12a: /* cvtpi2pd */
d3eb5eae 3678 gen_helper_enter_mmx(cpu_env);
664e0f19 3679 if (mod != 3) {
0af10c86 3680 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 3681 op2_offset = offsetof(CPUX86State,mmx_t0);
8686c490 3682 gen_ldq_env_A0(s->mem_index, op2_offset);
664e0f19
FB
3683 } else {
3684 rm = (modrm & 7);
3685 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3686 }
3687 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
5af45186
FB
3688 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3689 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
664e0f19
FB
3690 switch(b >> 8) {
3691 case 0x0:
d3eb5eae 3692 gen_helper_cvtpi2ps(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3693 break;
3694 default:
3695 case 0x1:
d3eb5eae 3696 gen_helper_cvtpi2pd(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3697 break;
3698 }
3699 break;
3700 case 0x22a: /* cvtsi2ss */
3701 case 0x32a: /* cvtsi2sd */
3702 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
0af10c86 3703 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
664e0f19 3704 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
5af45186 3705 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
28e10711 3706 if (ot == OT_LONG) {
d3eb5eae 3707 SSEFunc_0_epi sse_fn_epi = sse_op_table3ai[(b >> 8) & 1];
28e10711 3708 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 3709 sse_fn_epi(cpu_env, cpu_ptr0, cpu_tmp2_i32);
28e10711 3710 } else {
11f8cdbc 3711#ifdef TARGET_X86_64
d3eb5eae
BS
3712 SSEFunc_0_epl sse_fn_epl = sse_op_table3aq[(b >> 8) & 1];
3713 sse_fn_epl(cpu_env, cpu_ptr0, cpu_T[0]);
11f8cdbc
SW
3714#else
3715 goto illegal_op;
3716#endif
28e10711 3717 }
664e0f19
FB
3718 break;
3719 case 0x02c: /* cvttps2pi */
3720 case 0x12c: /* cvttpd2pi */
3721 case 0x02d: /* cvtps2pi */
3722 case 0x12d: /* cvtpd2pi */
d3eb5eae 3723 gen_helper_enter_mmx(cpu_env);
664e0f19 3724 if (mod != 3) {
0af10c86 3725 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 3726 op2_offset = offsetof(CPUX86State,xmm_t0);
8686c490 3727 gen_ldo_env_A0(s->mem_index, op2_offset);
664e0f19
FB
3728 } else {
3729 rm = (modrm & 7) | REX_B(s);
3730 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3731 }
3732 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
5af45186
FB
3733 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3734 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
664e0f19
FB
3735 switch(b) {
3736 case 0x02c:
d3eb5eae 3737 gen_helper_cvttps2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3738 break;
3739 case 0x12c:
d3eb5eae 3740 gen_helper_cvttpd2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3741 break;
3742 case 0x02d:
d3eb5eae 3743 gen_helper_cvtps2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3744 break;
3745 case 0x12d:
d3eb5eae 3746 gen_helper_cvtpd2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3747 break;
3748 }
3749 break;
3750 case 0x22c: /* cvttss2si */
3751 case 0x32c: /* cvttsd2si */
3752 case 0x22d: /* cvtss2si */
3753 case 0x32d: /* cvtsd2si */
3754 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
31313213 3755 if (mod != 3) {
0af10c86 3756 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
31313213 3757 if ((b >> 8) & 1) {
8686c490 3758 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
31313213 3759 } else {
57fec1fe 3760 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 3761 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
31313213
FB
3762 }
3763 op2_offset = offsetof(CPUX86State,xmm_t0);
3764 } else {
3765 rm = (modrm & 7) | REX_B(s);
3766 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3767 }
5af45186
FB
3768 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3769 if (ot == OT_LONG) {
d3eb5eae 3770 SSEFunc_i_ep sse_fn_i_ep =
bedc2ac1 3771 sse_op_table3bi[((b >> 7) & 2) | (b & 1)];
d3eb5eae 3772 sse_fn_i_ep(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3773 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5af45186 3774 } else {
11f8cdbc 3775#ifdef TARGET_X86_64
d3eb5eae 3776 SSEFunc_l_ep sse_fn_l_ep =
bedc2ac1 3777 sse_op_table3bq[((b >> 7) & 2) | (b & 1)];
d3eb5eae 3778 sse_fn_l_ep(cpu_T[0], cpu_env, cpu_ptr0);
11f8cdbc
SW
3779#else
3780 goto illegal_op;
3781#endif
5af45186 3782 }
57fec1fe 3783 gen_op_mov_reg_T0(ot, reg);
664e0f19
FB
3784 break;
3785 case 0xc4: /* pinsrw */
5fafdf24 3786 case 0x1c4:
d1e42c5c 3787 s->rip_offset = 1;
0af10c86
BS
3788 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
3789 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3790 if (b1) {
3791 val &= 7;
5af45186
FB
3792 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3793 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
664e0f19
FB
3794 } else {
3795 val &= 3;
5af45186
FB
3796 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3797 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
664e0f19
FB
3798 }
3799 break;
3800 case 0xc5: /* pextrw */
5fafdf24 3801 case 0x1c5:
664e0f19
FB
3802 if (mod != 3)
3803 goto illegal_op;
6dc2d0da 3804 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
0af10c86 3805 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3806 if (b1) {
3807 val &= 7;
3808 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3809 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3810 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
664e0f19
FB
3811 } else {
3812 val &= 3;
3813 rm = (modrm & 7);
5af45186
FB
3814 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3815 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
664e0f19
FB
3816 }
3817 reg = ((modrm >> 3) & 7) | rex_r;
6dc2d0da 3818 gen_op_mov_reg_T0(ot, reg);
664e0f19
FB
3819 break;
3820 case 0x1d6: /* movq ea, xmm */
3821 if (mod != 3) {
0af10c86 3822 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3823 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3824 } else {
3825 rm = (modrm & 7) | REX_B(s);
3826 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3827 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3828 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3829 }
3830 break;
3831 case 0x2d6: /* movq2dq */
d3eb5eae 3832 gen_helper_enter_mmx(cpu_env);
480c1cdb
FB
3833 rm = (modrm & 7);
3834 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3835 offsetof(CPUX86State,fpregs[rm].mmx));
3836 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3837 break;
3838 case 0x3d6: /* movdq2q */
d3eb5eae 3839 gen_helper_enter_mmx(cpu_env);
480c1cdb
FB
3840 rm = (modrm & 7) | REX_B(s);
3841 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3842 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
664e0f19
FB
3843 break;
3844 case 0xd7: /* pmovmskb */
3845 case 0x1d7:
3846 if (mod != 3)
3847 goto illegal_op;
3848 if (b1) {
3849 rm = (modrm & 7) | REX_B(s);
5af45186 3850 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3851 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_env, cpu_ptr0);
664e0f19
FB
3852 } else {
3853 rm = (modrm & 7);
5af45186 3854 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
d3eb5eae 3855 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_env, cpu_ptr0);
664e0f19 3856 }
b6abf97d 3857 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
664e0f19 3858 reg = ((modrm >> 3) & 7) | rex_r;
57fec1fe 3859 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19 3860 break;
111994ee 3861
4242b1bd 3862 case 0x138:
000cacf6 3863 case 0x038:
4242b1bd 3864 b = modrm;
111994ee
RH
3865 if ((b & 0xf0) == 0xf0) {
3866 goto do_0f_38_fx;
3867 }
0af10c86 3868 modrm = cpu_ldub_code(env, s->pc++);
4242b1bd
AZ
3869 rm = modrm & 7;
3870 reg = ((modrm >> 3) & 7) | rex_r;
3871 mod = (modrm >> 6) & 3;
c045af25
AK
3872 if (b1 >= 2) {
3873 goto illegal_op;
3874 }
4242b1bd 3875
d3eb5eae
BS
3876 sse_fn_epp = sse_op_table6[b].op[b1];
3877 if (!sse_fn_epp) {
4242b1bd 3878 goto illegal_op;
c4baa050 3879 }
222a3336
AZ
3880 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3881 goto illegal_op;
4242b1bd
AZ
3882
3883 if (b1) {
3884 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3885 if (mod == 3) {
3886 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3887 } else {
3888 op2_offset = offsetof(CPUX86State,xmm_t0);
0af10c86 3889 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
222a3336
AZ
3890 switch (b) {
3891 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3892 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3893 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3894 gen_ldq_env_A0(s->mem_index, op2_offset +
3895 offsetof(XMMReg, XMM_Q(0)));
3896 break;
3897 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3898 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
a7812ae4 3899 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 3900 (s->mem_index >> 2) - 1);
a7812ae4 3901 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
222a3336
AZ
3902 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3903 offsetof(XMMReg, XMM_L(0)));
3904 break;
3905 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3906 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3907 (s->mem_index >> 2) - 1);
3908 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3909 offsetof(XMMReg, XMM_W(0)));
3910 break;
3911 case 0x2a: /* movntqda */
3912 gen_ldo_env_A0(s->mem_index, op1_offset);
3913 return;
3914 default:
3915 gen_ldo_env_A0(s->mem_index, op2_offset);
3916 }
4242b1bd
AZ
3917 }
3918 } else {
3919 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3920 if (mod == 3) {
3921 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3922 } else {
3923 op2_offset = offsetof(CPUX86State,mmx_t0);
0af10c86 3924 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
3925 gen_ldq_env_A0(s->mem_index, op2_offset);
3926 }
3927 }
d3eb5eae 3928 if (sse_fn_epp == SSE_SPECIAL) {
222a3336 3929 goto illegal_op;
c4baa050 3930 }
222a3336 3931
4242b1bd
AZ
3932 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3933 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 3934 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
222a3336 3935
3ca51d07
RH
3936 if (b == 0x17) {
3937 set_cc_op(s, CC_OP_EFLAGS);
3938 }
4242b1bd 3939 break;
111994ee
RH
3940
3941 case 0x238:
3942 case 0x338:
3943 do_0f_38_fx:
3944 /* Various integer extensions at 0f 38 f[0-f]. */
3945 b = modrm | (b1 << 8);
0af10c86 3946 modrm = cpu_ldub_code(env, s->pc++);
222a3336
AZ
3947 reg = ((modrm >> 3) & 7) | rex_r;
3948
111994ee
RH
3949 switch (b) {
3950 case 0x3f0: /* crc32 Gd,Eb */
3951 case 0x3f1: /* crc32 Gd,Ey */
3952 do_crc32:
3953 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42)) {
3954 goto illegal_op;
3955 }
3956 if ((b & 0xff) == 0xf0) {
3957 ot = OT_BYTE;
3958 } else if (s->dflag != 2) {
3959 ot = (s->prefix & PREFIX_DATA ? OT_WORD : OT_LONG);
3960 } else {
3961 ot = OT_QUAD;
3962 }
4242b1bd 3963
111994ee
RH
3964 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3965 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3966 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
3967 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3968 cpu_T[0], tcg_const_i32(8 << ot));
222a3336 3969
111994ee
RH
3970 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3971 gen_op_mov_reg_T0(ot, reg);
3972 break;
222a3336 3973
111994ee
RH
3974 case 0x1f0: /* crc32 or movbe */
3975 case 0x1f1:
3976 /* For these insns, the f3 prefix is supposed to have priority
3977 over the 66 prefix, but that's not what we implement above
3978 setting b1. */
3979 if (s->prefix & PREFIX_REPNZ) {
3980 goto do_crc32;
3981 }
3982 /* FALLTHRU */
3983 case 0x0f0: /* movbe Gy,My */
3984 case 0x0f1: /* movbe My,Gy */
3985 if (!(s->cpuid_ext_features & CPUID_EXT_MOVBE)) {
3986 goto illegal_op;
3987 }
3988 if (s->dflag != 2) {
3989 ot = (s->prefix & PREFIX_DATA ? OT_WORD : OT_LONG);
3990 } else {
3991 ot = OT_QUAD;
3992 }
3993
3994 /* Load the data incoming to the bswap. Note that the TCG
3995 implementation of bswap requires the input be zero
3996 extended. In the case of the loads, we simply know that
3997 gen_op_ld_v via gen_ldst_modrm does that already. */
3998 if ((b & 1) == 0) {
3999 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4000 } else {
4001 switch (ot) {
4002 case OT_WORD:
4003 tcg_gen_ext16u_tl(cpu_T[0], cpu_regs[reg]);
4004 break;
4005 default:
4006 tcg_gen_ext32u_tl(cpu_T[0], cpu_regs[reg]);
4007 break;
4008 case OT_QUAD:
4009 tcg_gen_mov_tl(cpu_T[0], cpu_regs[reg]);
4010 break;
4011 }
4012 }
4013
4014 switch (ot) {
4015 case OT_WORD:
4016 tcg_gen_bswap16_tl(cpu_T[0], cpu_T[0]);
4017 break;
4018 default:
4019 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
4020 break;
4021#ifdef TARGET_X86_64
4022 case OT_QUAD:
4023 tcg_gen_bswap64_tl(cpu_T[0], cpu_T[0]);
4024 break;
4025#endif
4026 }
4027
4028 if ((b & 1) == 0) {
4029 gen_op_mov_reg_T0(ot, reg);
4030 } else {
4031 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
4032 }
4033 break;
4034
7073fbad
RH
4035 case 0x0f2: /* andn Gy, By, Ey */
4036 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4037 || !(s->prefix & PREFIX_VEX)
4038 || s->vex_l != 0) {
4039 goto illegal_op;
4040 }
4041 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4042 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4043 tcg_gen_andc_tl(cpu_T[0], cpu_regs[s->vex_v], cpu_T[0]);
4044 gen_op_mov_reg_T0(ot, reg);
4045 gen_op_update1_cc();
4046 set_cc_op(s, CC_OP_LOGICB + ot);
4047 break;
4048
c7ab7565
RH
4049 case 0x0f7: /* bextr Gy, Ey, By */
4050 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4051 || !(s->prefix & PREFIX_VEX)
4052 || s->vex_l != 0) {
4053 goto illegal_op;
4054 }
4055 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4056 {
4057 TCGv bound, zero;
4058
4059 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4060 /* Extract START, and shift the operand.
4061 Shifts larger than operand size get zeros. */
4062 tcg_gen_ext8u_tl(cpu_A0, cpu_regs[s->vex_v]);
4063 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_A0);
4064
4065 bound = tcg_const_tl(ot == OT_QUAD ? 63 : 31);
4066 zero = tcg_const_tl(0);
4067 tcg_gen_movcond_tl(TCG_COND_LEU, cpu_T[0], cpu_A0, bound,
4068 cpu_T[0], zero);
4069 tcg_temp_free(zero);
4070
4071 /* Extract the LEN into a mask. Lengths larger than
4072 operand size get all ones. */
4073 tcg_gen_shri_tl(cpu_A0, cpu_regs[s->vex_v], 8);
4074 tcg_gen_ext8u_tl(cpu_A0, cpu_A0);
4075 tcg_gen_movcond_tl(TCG_COND_LEU, cpu_A0, cpu_A0, bound,
4076 cpu_A0, bound);
4077 tcg_temp_free(bound);
4078 tcg_gen_movi_tl(cpu_T[1], 1);
4079 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_A0);
4080 tcg_gen_subi_tl(cpu_T[1], cpu_T[1], 1);
4081 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4082
4083 gen_op_mov_reg_T0(ot, reg);
4084 gen_op_update1_cc();
4085 set_cc_op(s, CC_OP_LOGICB + ot);
4086 }
4087 break;
4088
02ea1e6b
RH
4089 case 0x0f5: /* bzhi Gy, Ey, By */
4090 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4091 || !(s->prefix & PREFIX_VEX)
4092 || s->vex_l != 0) {
4093 goto illegal_op;
4094 }
4095 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4096 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4097 tcg_gen_ext8u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4098 {
4099 TCGv bound = tcg_const_tl(ot == OT_QUAD ? 63 : 31);
4100 /* Note that since we're using BMILG (in order to get O
4101 cleared) we need to store the inverse into C. */
4102 tcg_gen_setcond_tl(TCG_COND_LT, cpu_cc_src,
4103 cpu_T[1], bound);
4104 tcg_gen_movcond_tl(TCG_COND_GT, cpu_T[1], cpu_T[1],
4105 bound, bound, cpu_T[1]);
4106 tcg_temp_free(bound);
4107 }
4108 tcg_gen_movi_tl(cpu_A0, -1);
4109 tcg_gen_shl_tl(cpu_A0, cpu_A0, cpu_T[1]);
4110 tcg_gen_andc_tl(cpu_T[0], cpu_T[0], cpu_A0);
4111 gen_op_mov_reg_T0(ot, reg);
4112 gen_op_update1_cc();
4113 set_cc_op(s, CC_OP_BMILGB + ot);
4114 break;
4115
5f1f4b17
RH
4116 case 0x3f6: /* mulx By, Gy, rdx, Ey */
4117 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4118 || !(s->prefix & PREFIX_VEX)
4119 || s->vex_l != 0) {
4120 goto illegal_op;
4121 }
4122 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4123 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4124 switch (ot) {
5f1f4b17 4125 default:
a4bcea3d
RH
4126 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4127 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EDX]);
4128 tcg_gen_mulu2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
4129 cpu_tmp2_i32, cpu_tmp3_i32);
4130 tcg_gen_extu_i32_tl(cpu_regs[s->vex_v], cpu_tmp2_i32);
4131 tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp3_i32);
5f1f4b17
RH
4132 break;
4133#ifdef TARGET_X86_64
4134 case OT_QUAD:
a4bcea3d
RH
4135 tcg_gen_mulu2_i64(cpu_regs[s->vex_v], cpu_regs[reg],
4136 cpu_T[0], cpu_regs[R_EDX]);
5f1f4b17
RH
4137 break;
4138#endif
4139 }
4140 break;
4141
0592f74a
RH
4142 case 0x3f5: /* pdep Gy, By, Ey */
4143 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4144 || !(s->prefix & PREFIX_VEX)
4145 || s->vex_l != 0) {
4146 goto illegal_op;
4147 }
4148 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4149 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4150 /* Note that by zero-extending the mask operand, we
4151 automatically handle zero-extending the result. */
4152 if (s->dflag == 2) {
4153 tcg_gen_mov_tl(cpu_T[1], cpu_regs[s->vex_v]);
4154 } else {
4155 tcg_gen_ext32u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4156 }
4157 gen_helper_pdep(cpu_regs[reg], cpu_T[0], cpu_T[1]);
4158 break;
4159
4160 case 0x2f5: /* pext Gy, By, Ey */
4161 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4162 || !(s->prefix & PREFIX_VEX)
4163 || s->vex_l != 0) {
4164 goto illegal_op;
4165 }
4166 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4167 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4168 /* Note that by zero-extending the mask operand, we
4169 automatically handle zero-extending the result. */
4170 if (s->dflag == 2) {
4171 tcg_gen_mov_tl(cpu_T[1], cpu_regs[s->vex_v]);
4172 } else {
4173 tcg_gen_ext32u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4174 }
4175 gen_helper_pext(cpu_regs[reg], cpu_T[0], cpu_T[1]);
4176 break;
4177
cd7f97ca
RH
4178 case 0x1f6: /* adcx Gy, Ey */
4179 case 0x2f6: /* adox Gy, Ey */
4180 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_ADX)) {
4181 goto illegal_op;
4182 } else {
76f13133 4183 TCGv carry_in, carry_out, zero;
cd7f97ca
RH
4184 int end_op;
4185
4186 ot = (s->dflag == 2 ? OT_QUAD : OT_LONG);
4187 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4188
4189 /* Re-use the carry-out from a previous round. */
4190 TCGV_UNUSED(carry_in);
4191 carry_out = (b == 0x1f6 ? cpu_cc_dst : cpu_cc_src2);
4192 switch (s->cc_op) {
4193 case CC_OP_ADCX:
4194 if (b == 0x1f6) {
4195 carry_in = cpu_cc_dst;
4196 end_op = CC_OP_ADCX;
4197 } else {
4198 end_op = CC_OP_ADCOX;
4199 }
4200 break;
4201 case CC_OP_ADOX:
4202 if (b == 0x1f6) {
4203 end_op = CC_OP_ADCOX;
4204 } else {
4205 carry_in = cpu_cc_src2;
4206 end_op = CC_OP_ADOX;
4207 }
4208 break;
4209 case CC_OP_ADCOX:
4210 end_op = CC_OP_ADCOX;
4211 carry_in = carry_out;
4212 break;
4213 default:
c53de1a2 4214 end_op = (b == 0x1f6 ? CC_OP_ADCX : CC_OP_ADOX);
cd7f97ca
RH
4215 break;
4216 }
4217 /* If we can't reuse carry-out, get it out of EFLAGS. */
4218 if (TCGV_IS_UNUSED(carry_in)) {
4219 if (s->cc_op != CC_OP_ADCX && s->cc_op != CC_OP_ADOX) {
4220 gen_compute_eflags(s);
4221 }
4222 carry_in = cpu_tmp0;
4223 tcg_gen_shri_tl(carry_in, cpu_cc_src,
4224 ctz32(b == 0x1f6 ? CC_C : CC_O));
4225 tcg_gen_andi_tl(carry_in, carry_in, 1);
4226 }
4227
4228 switch (ot) {
4229#ifdef TARGET_X86_64
4230 case OT_LONG:
4231 /* If we know TL is 64-bit, and we want a 32-bit
4232 result, just do everything in 64-bit arithmetic. */
4233 tcg_gen_ext32u_i64(cpu_regs[reg], cpu_regs[reg]);
4234 tcg_gen_ext32u_i64(cpu_T[0], cpu_T[0]);
4235 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_regs[reg]);
4236 tcg_gen_add_i64(cpu_T[0], cpu_T[0], carry_in);
4237 tcg_gen_ext32u_i64(cpu_regs[reg], cpu_T[0]);
4238 tcg_gen_shri_i64(carry_out, cpu_T[0], 32);
4239 break;
4240#endif
4241 default:
4242 /* Otherwise compute the carry-out in two steps. */
76f13133
RH
4243 zero = tcg_const_tl(0);
4244 tcg_gen_add2_tl(cpu_T[0], carry_out,
4245 cpu_T[0], zero,
4246 carry_in, zero);
4247 tcg_gen_add2_tl(cpu_regs[reg], carry_out,
4248 cpu_regs[reg], carry_out,
4249 cpu_T[0], zero);
4250 tcg_temp_free(zero);
cd7f97ca
RH
4251 break;
4252 }
cd7f97ca
RH
4253 set_cc_op(s, end_op);
4254 }
4255 break;
4256
4a554890
RH
4257 case 0x1f7: /* shlx Gy, Ey, By */
4258 case 0x2f7: /* sarx Gy, Ey, By */
4259 case 0x3f7: /* shrx Gy, Ey, By */
4260 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4261 || !(s->prefix & PREFIX_VEX)
4262 || s->vex_l != 0) {
4263 goto illegal_op;
4264 }
4265 ot = (s->dflag == 2 ? OT_QUAD : OT_LONG);
4266 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4267 if (ot == OT_QUAD) {
4268 tcg_gen_andi_tl(cpu_T[1], cpu_regs[s->vex_v], 63);
4269 } else {
4270 tcg_gen_andi_tl(cpu_T[1], cpu_regs[s->vex_v], 31);
4271 }
4272 if (b == 0x1f7) {
4273 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4274 } else if (b == 0x2f7) {
4275 if (ot != OT_QUAD) {
4276 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4277 }
4278 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4279 } else {
4280 if (ot != OT_QUAD) {
4281 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4282 }
4283 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4284 }
4285 gen_op_mov_reg_T0(ot, reg);
4286 break;
4287
bc4b43dc
RH
4288 case 0x0f3:
4289 case 0x1f3:
4290 case 0x2f3:
4291 case 0x3f3: /* Group 17 */
4292 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4293 || !(s->prefix & PREFIX_VEX)
4294 || s->vex_l != 0) {
4295 goto illegal_op;
4296 }
4297 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4298 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4299
4300 switch (reg & 7) {
4301 case 1: /* blsr By,Ey */
4302 tcg_gen_neg_tl(cpu_T[1], cpu_T[0]);
4303 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4304 gen_op_mov_reg_T0(ot, s->vex_v);
4305 gen_op_update2_cc();
4306 set_cc_op(s, CC_OP_BMILGB + ot);
4307 break;
4308
4309 case 2: /* blsmsk By,Ey */
4310 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4311 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], 1);
4312 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_cc_src);
4313 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4314 set_cc_op(s, CC_OP_BMILGB + ot);
4315 break;
4316
4317 case 3: /* blsi By, Ey */
4318 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4319 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], 1);
4320 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_cc_src);
4321 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4322 set_cc_op(s, CC_OP_BMILGB + ot);
4323 break;
4324
4325 default:
4326 goto illegal_op;
4327 }
4328 break;
4329
111994ee
RH
4330 default:
4331 goto illegal_op;
4332 }
222a3336 4333 break;
111994ee 4334
222a3336
AZ
4335 case 0x03a:
4336 case 0x13a:
4242b1bd 4337 b = modrm;
0af10c86 4338 modrm = cpu_ldub_code(env, s->pc++);
4242b1bd
AZ
4339 rm = modrm & 7;
4340 reg = ((modrm >> 3) & 7) | rex_r;
4341 mod = (modrm >> 6) & 3;
c045af25
AK
4342 if (b1 >= 2) {
4343 goto illegal_op;
4344 }
4242b1bd 4345
d3eb5eae
BS
4346 sse_fn_eppi = sse_op_table7[b].op[b1];
4347 if (!sse_fn_eppi) {
4242b1bd 4348 goto illegal_op;
c4baa050 4349 }
222a3336
AZ
4350 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4351 goto illegal_op;
4352
d3eb5eae 4353 if (sse_fn_eppi == SSE_SPECIAL) {
222a3336
AZ
4354 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4355 rm = (modrm & 7) | REX_B(s);
4356 if (mod != 3)
0af10c86 4357 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
222a3336 4358 reg = ((modrm >> 3) & 7) | rex_r;
0af10c86 4359 val = cpu_ldub_code(env, s->pc++);
222a3336
AZ
4360 switch (b) {
4361 case 0x14: /* pextrb */
4362 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4363 xmm_regs[reg].XMM_B(val & 15)));
4364 if (mod == 3)
4365 gen_op_mov_reg_T0(ot, rm);
4366 else
4367 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4368 (s->mem_index >> 2) - 1);
4369 break;
4370 case 0x15: /* pextrw */
4371 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4372 xmm_regs[reg].XMM_W(val & 7)));
4373 if (mod == 3)
4374 gen_op_mov_reg_T0(ot, rm);
4375 else
4376 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4377 (s->mem_index >> 2) - 1);
4378 break;
4379 case 0x16:
4380 if (ot == OT_LONG) { /* pextrd */
4381 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4382 offsetof(CPUX86State,
4383 xmm_regs[reg].XMM_L(val & 3)));
a7812ae4 4384 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
222a3336 4385 if (mod == 3)
a7812ae4 4386 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
222a3336 4387 else
a7812ae4 4388 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
222a3336
AZ
4389 (s->mem_index >> 2) - 1);
4390 } else { /* pextrq */
a7812ae4 4391#ifdef TARGET_X86_64
222a3336
AZ
4392 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4393 offsetof(CPUX86State,
4394 xmm_regs[reg].XMM_Q(val & 1)));
4395 if (mod == 3)
4396 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4397 else
4398 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4399 (s->mem_index >> 2) - 1);
a7812ae4
PB
4400#else
4401 goto illegal_op;
4402#endif
222a3336
AZ
4403 }
4404 break;
4405 case 0x17: /* extractps */
4406 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4407 xmm_regs[reg].XMM_L(val & 3)));
4408 if (mod == 3)
4409 gen_op_mov_reg_T0(ot, rm);
4410 else
4411 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4412 (s->mem_index >> 2) - 1);
4413 break;
4414 case 0x20: /* pinsrb */
4415 if (mod == 3)
4416 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4417 else
34c6addd 4418 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
222a3336 4419 (s->mem_index >> 2) - 1);
34c6addd 4420 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
222a3336
AZ
4421 xmm_regs[reg].XMM_B(val & 15)));
4422 break;
4423 case 0x21: /* insertps */
a7812ae4 4424 if (mod == 3) {
222a3336
AZ
4425 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4426 offsetof(CPUX86State,xmm_regs[rm]
4427 .XMM_L((val >> 6) & 3)));
a7812ae4
PB
4428 } else {
4429 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 4430 (s->mem_index >> 2) - 1);
a7812ae4
PB
4431 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4432 }
222a3336
AZ
4433 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4434 offsetof(CPUX86State,xmm_regs[reg]
4435 .XMM_L((val >> 4) & 3)));
4436 if ((val >> 0) & 1)
4437 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4438 cpu_env, offsetof(CPUX86State,
4439 xmm_regs[reg].XMM_L(0)));
4440 if ((val >> 1) & 1)
4441 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4442 cpu_env, offsetof(CPUX86State,
4443 xmm_regs[reg].XMM_L(1)));
4444 if ((val >> 2) & 1)
4445 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4446 cpu_env, offsetof(CPUX86State,
4447 xmm_regs[reg].XMM_L(2)));
4448 if ((val >> 3) & 1)
4449 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4450 cpu_env, offsetof(CPUX86State,
4451 xmm_regs[reg].XMM_L(3)));
4452 break;
4453 case 0x22:
4454 if (ot == OT_LONG) { /* pinsrd */
4455 if (mod == 3)
a7812ae4 4456 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
222a3336 4457 else
a7812ae4 4458 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 4459 (s->mem_index >> 2) - 1);
a7812ae4 4460 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
222a3336
AZ
4461 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4462 offsetof(CPUX86State,
4463 xmm_regs[reg].XMM_L(val & 3)));
4464 } else { /* pinsrq */
a7812ae4 4465#ifdef TARGET_X86_64
222a3336
AZ
4466 if (mod == 3)
4467 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4468 else
4469 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4470 (s->mem_index >> 2) - 1);
4471 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4472 offsetof(CPUX86State,
4473 xmm_regs[reg].XMM_Q(val & 1)));
a7812ae4
PB
4474#else
4475 goto illegal_op;
4476#endif
222a3336
AZ
4477 }
4478 break;
4479 }
4480 return;
4481 }
4242b1bd
AZ
4482
4483 if (b1) {
4484 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4485 if (mod == 3) {
4486 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4487 } else {
4488 op2_offset = offsetof(CPUX86State,xmm_t0);
0af10c86 4489 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
4490 gen_ldo_env_A0(s->mem_index, op2_offset);
4491 }
4492 } else {
4493 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4494 if (mod == 3) {
4495 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4496 } else {
4497 op2_offset = offsetof(CPUX86State,mmx_t0);
0af10c86 4498 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
4499 gen_ldq_env_A0(s->mem_index, op2_offset);
4500 }
4501 }
0af10c86 4502 val = cpu_ldub_code(env, s->pc++);
4242b1bd 4503
222a3336 4504 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3ca51d07 4505 set_cc_op(s, CC_OP_EFLAGS);
222a3336
AZ
4506
4507 if (s->dflag == 2)
4508 /* The helper must use entire 64-bit gp registers */
4509 val |= 1 << 8;
4510 }
4511
4242b1bd
AZ
4512 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4513 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4514 sse_fn_eppi(cpu_env, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4242b1bd 4515 break;
e2c3c2c5
RH
4516
4517 case 0x33a:
4518 /* Various integer extensions at 0f 3a f[0-f]. */
4519 b = modrm | (b1 << 8);
4520 modrm = cpu_ldub_code(env, s->pc++);
4521 reg = ((modrm >> 3) & 7) | rex_r;
4522
4523 switch (b) {
4524 case 0x3f0: /* rorx Gy,Ey, Ib */
4525 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4526 || !(s->prefix & PREFIX_VEX)
4527 || s->vex_l != 0) {
4528 goto illegal_op;
4529 }
4530 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4531 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4532 b = cpu_ldub_code(env, s->pc++);
4533 if (ot == OT_QUAD) {
4534 tcg_gen_rotri_tl(cpu_T[0], cpu_T[0], b & 63);
4535 } else {
4536 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4537 tcg_gen_rotri_i32(cpu_tmp2_i32, cpu_tmp2_i32, b & 31);
4538 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4539 }
4540 gen_op_mov_reg_T0(ot, reg);
4541 break;
4542
4543 default:
4544 goto illegal_op;
4545 }
4546 break;
4547
664e0f19
FB
4548 default:
4549 goto illegal_op;
4550 }
4551 } else {
4552 /* generic MMX or SSE operation */
d1e42c5c 4553 switch(b) {
d1e42c5c
FB
4554 case 0x70: /* pshufx insn */
4555 case 0xc6: /* pshufx insn */
4556 case 0xc2: /* compare insns */
4557 s->rip_offset = 1;
4558 break;
4559 default:
4560 break;
664e0f19
FB
4561 }
4562 if (is_xmm) {
4563 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4564 if (mod != 3) {
0af10c86 4565 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 4566 op2_offset = offsetof(CPUX86State,xmm_t0);
480c1cdb 4567 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
664e0f19
FB
4568 b == 0xc2)) {
4569 /* specific case for SSE single instructions */
4570 if (b1 == 2) {
4571 /* 32 bit access */
57fec1fe 4572 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 4573 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
664e0f19
FB
4574 } else {
4575 /* 64 bit access */
8686c490 4576 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
664e0f19
FB
4577 }
4578 } else {
8686c490 4579 gen_ldo_env_A0(s->mem_index, op2_offset);
664e0f19
FB
4580 }
4581 } else {
4582 rm = (modrm & 7) | REX_B(s);
4583 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4584 }
4585 } else {
4586 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4587 if (mod != 3) {
0af10c86 4588 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 4589 op2_offset = offsetof(CPUX86State,mmx_t0);
8686c490 4590 gen_ldq_env_A0(s->mem_index, op2_offset);
664e0f19
FB
4591 } else {
4592 rm = (modrm & 7);
4593 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4594 }
4595 }
4596 switch(b) {
a35f3ec7 4597 case 0x0f: /* 3DNow! data insns */
e771edab
AJ
4598 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4599 goto illegal_op;
0af10c86 4600 val = cpu_ldub_code(env, s->pc++);
d3eb5eae
BS
4601 sse_fn_epp = sse_op_table5[val];
4602 if (!sse_fn_epp) {
a35f3ec7 4603 goto illegal_op;
c4baa050 4604 }
5af45186
FB
4605 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4606 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4607 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
a35f3ec7 4608 break;
664e0f19
FB
4609 case 0x70: /* pshufx insn */
4610 case 0xc6: /* pshufx insn */
0af10c86 4611 val = cpu_ldub_code(env, s->pc++);
5af45186
FB
4612 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4613 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
c4baa050 4614 /* XXX: introduce a new table? */
d3eb5eae 4615 sse_fn_ppi = (SSEFunc_0_ppi)sse_fn_epp;
c4baa050 4616 sse_fn_ppi(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
664e0f19
FB
4617 break;
4618 case 0xc2:
4619 /* compare insns */
0af10c86 4620 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
4621 if (val >= 8)
4622 goto illegal_op;
d3eb5eae 4623 sse_fn_epp = sse_op_table4[val][b1];
c4baa050 4624
5af45186
FB
4625 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4626 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4627 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19 4628 break;
b8b6a50b
FB
4629 case 0xf7:
4630 /* maskmov : we must prepare A0 */
4631 if (mod != 3)
4632 goto illegal_op;
4633#ifdef TARGET_X86_64
4634 if (s->aflag == 2) {
4635 gen_op_movq_A0_reg(R_EDI);
4636 } else
4637#endif
4638 {
4639 gen_op_movl_A0_reg(R_EDI);
4640 if (s->aflag == 0)
4641 gen_op_andl_A0_ffff();
4642 }
4643 gen_add_A0_ds_seg(s);
4644
4645 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4646 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
c4baa050 4647 /* XXX: introduce a new table? */
d3eb5eae
BS
4648 sse_fn_eppt = (SSEFunc_0_eppt)sse_fn_epp;
4649 sse_fn_eppt(cpu_env, cpu_ptr0, cpu_ptr1, cpu_A0);
b8b6a50b 4650 break;
664e0f19 4651 default:
5af45186
FB
4652 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4653 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4654 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
4655 break;
4656 }
4657 if (b == 0x2e || b == 0x2f) {
3ca51d07 4658 set_cc_op(s, CC_OP_EFLAGS);
664e0f19
FB
4659 }
4660 }
4661}
4662
2c0262af
FB
4663/* convert one instruction. s->is_jmp is set if the translation must
4664 be stopped. Return the next pc value */
0af10c86
BS
4665static target_ulong disas_insn(CPUX86State *env, DisasContext *s,
4666 target_ulong pc_start)
2c0262af
FB
4667{
4668 int b, prefixes, aflag, dflag;
4669 int shift, ot;
4670 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
14ce26e7
FB
4671 target_ulong next_eip, tval;
4672 int rex_w, rex_r;
2c0262af 4673
fdefe51c 4674 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
70cff25e 4675 tcg_gen_debug_insn_start(pc_start);
fdefe51c 4676 }
2c0262af
FB
4677 s->pc = pc_start;
4678 prefixes = 0;
4679 aflag = s->code32;
4680 dflag = s->code32;
4681 s->override = -1;
14ce26e7
FB
4682 rex_w = -1;
4683 rex_r = 0;
4684#ifdef TARGET_X86_64
4685 s->rex_x = 0;
4686 s->rex_b = 0;
5fafdf24 4687 x86_64_hregs = 0;
14ce26e7
FB
4688#endif
4689 s->rip_offset = 0; /* for relative ip address */
701ed211
RH
4690 s->vex_l = 0;
4691 s->vex_v = 0;
2c0262af 4692 next_byte:
0af10c86 4693 b = cpu_ldub_code(env, s->pc);
2c0262af 4694 s->pc++;
4a6fd938
RH
4695 /* Collect prefixes. */
4696 switch (b) {
4697 case 0xf3:
4698 prefixes |= PREFIX_REPZ;
4699 goto next_byte;
4700 case 0xf2:
4701 prefixes |= PREFIX_REPNZ;
4702 goto next_byte;
4703 case 0xf0:
4704 prefixes |= PREFIX_LOCK;
4705 goto next_byte;
4706 case 0x2e:
4707 s->override = R_CS;
4708 goto next_byte;
4709 case 0x36:
4710 s->override = R_SS;
4711 goto next_byte;
4712 case 0x3e:
4713 s->override = R_DS;
4714 goto next_byte;
4715 case 0x26:
4716 s->override = R_ES;
4717 goto next_byte;
4718 case 0x64:
4719 s->override = R_FS;
4720 goto next_byte;
4721 case 0x65:
4722 s->override = R_GS;
4723 goto next_byte;
4724 case 0x66:
4725 prefixes |= PREFIX_DATA;
4726 goto next_byte;
4727 case 0x67:
4728 prefixes |= PREFIX_ADR;
4729 goto next_byte;
14ce26e7 4730#ifdef TARGET_X86_64
4a6fd938
RH
4731 case 0x40 ... 0x4f:
4732 if (CODE64(s)) {
14ce26e7
FB
4733 /* REX prefix */
4734 rex_w = (b >> 3) & 1;
4735 rex_r = (b & 0x4) << 1;
4736 s->rex_x = (b & 0x2) << 2;
4737 REX_B(s) = (b & 0x1) << 3;
4738 x86_64_hregs = 1; /* select uniform byte register addressing */
4739 goto next_byte;
4740 }
4a6fd938
RH
4741 break;
4742#endif
701ed211
RH
4743 case 0xc5: /* 2-byte VEX */
4744 case 0xc4: /* 3-byte VEX */
4745 /* VEX prefixes cannot be used except in 32-bit mode.
4746 Otherwise the instruction is LES or LDS. */
4747 if (s->code32 && !s->vm86) {
4748 static const int pp_prefix[4] = {
4749 0, PREFIX_DATA, PREFIX_REPZ, PREFIX_REPNZ
4750 };
4751 int vex3, vex2 = cpu_ldub_code(env, s->pc);
4752
4753 if (!CODE64(s) && (vex2 & 0xc0) != 0xc0) {
4754 /* 4.1.4.6: In 32-bit mode, bits [7:6] must be 11b,
4755 otherwise the instruction is LES or LDS. */
4756 break;
4757 }
4758 s->pc++;
4759
085d8134 4760 /* 4.1.1-4.1.3: No preceding lock, 66, f2, f3, or rex prefixes. */
701ed211
RH
4761 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ
4762 | PREFIX_LOCK | PREFIX_DATA)) {
4763 goto illegal_op;
4764 }
4765#ifdef TARGET_X86_64
4766 if (x86_64_hregs) {
4767 goto illegal_op;
4768 }
4769#endif
4770 rex_r = (~vex2 >> 4) & 8;
4771 if (b == 0xc5) {
4772 vex3 = vex2;
4773 b = cpu_ldub_code(env, s->pc++);
4774 } else {
4775#ifdef TARGET_X86_64
4776 s->rex_x = (~vex2 >> 3) & 8;
4777 s->rex_b = (~vex2 >> 2) & 8;
4778#endif
4779 vex3 = cpu_ldub_code(env, s->pc++);
4780 rex_w = (vex3 >> 7) & 1;
4781 switch (vex2 & 0x1f) {
4782 case 0x01: /* Implied 0f leading opcode bytes. */
4783 b = cpu_ldub_code(env, s->pc++) | 0x100;
4784 break;
4785 case 0x02: /* Implied 0f 38 leading opcode bytes. */
4786 b = 0x138;
4787 break;
4788 case 0x03: /* Implied 0f 3a leading opcode bytes. */
4789 b = 0x13a;
4790 break;
4791 default: /* Reserved for future use. */
4792 goto illegal_op;
4793 }
4794 }
4795 s->vex_v = (~vex3 >> 3) & 0xf;
4796 s->vex_l = (vex3 >> 2) & 1;
4797 prefixes |= pp_prefix[vex3 & 3] | PREFIX_VEX;
4798 }
4799 break;
4a6fd938
RH
4800 }
4801
4802 /* Post-process prefixes. */
4803 if (prefixes & PREFIX_DATA) {
4804 dflag ^= 1;
4805 }
4806 if (prefixes & PREFIX_ADR) {
4807 aflag ^= 1;
4808 }
4809#ifdef TARGET_X86_64
4810 if (CODE64(s)) {
14ce26e7
FB
4811 if (rex_w == 1) {
4812 /* 0x66 is ignored if rex.w is set */
4813 dflag = 2;
14ce26e7 4814 }
4a6fd938 4815 if (!(prefixes & PREFIX_ADR)) {
14ce26e7 4816 aflag = 2;
14ce26e7 4817 }
2c0262af 4818 }
4a6fd938 4819#endif
2c0262af 4820
2c0262af
FB
4821 s->prefix = prefixes;
4822 s->aflag = aflag;
4823 s->dflag = dflag;
4824
4825 /* lock generation */
4826 if (prefixes & PREFIX_LOCK)
a7812ae4 4827 gen_helper_lock();
2c0262af
FB
4828
4829 /* now check op code */
4830 reswitch:
4831 switch(b) {
4832 case 0x0f:
4833 /**************************/
4834 /* extended op code */
0af10c86 4835 b = cpu_ldub_code(env, s->pc++) | 0x100;
2c0262af 4836 goto reswitch;
3b46e624 4837
2c0262af
FB
4838 /**************************/
4839 /* arith & logic */
4840 case 0x00 ... 0x05:
4841 case 0x08 ... 0x0d:
4842 case 0x10 ... 0x15:
4843 case 0x18 ... 0x1d:
4844 case 0x20 ... 0x25:
4845 case 0x28 ... 0x2d:
4846 case 0x30 ... 0x35:
4847 case 0x38 ... 0x3d:
4848 {
4849 int op, f, val;
4850 op = (b >> 3) & 7;
4851 f = (b >> 1) & 3;
4852
4853 if ((b & 1) == 0)
4854 ot = OT_BYTE;
4855 else
14ce26e7 4856 ot = dflag + OT_WORD;
3b46e624 4857
2c0262af
FB
4858 switch(f) {
4859 case 0: /* OP Ev, Gv */
0af10c86 4860 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 4861 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 4862 mod = (modrm >> 6) & 3;
14ce26e7 4863 rm = (modrm & 7) | REX_B(s);
2c0262af 4864 if (mod != 3) {
0af10c86 4865 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
4866 opreg = OR_TMP0;
4867 } else if (op == OP_XORL && rm == reg) {
4868 xor_zero:
4869 /* xor reg, reg optimisation */
436ff2d2 4870 set_cc_op(s, CC_OP_CLR);
2c0262af 4871 gen_op_movl_T0_0();
57fec1fe 4872 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
4873 break;
4874 } else {
4875 opreg = rm;
4876 }
57fec1fe 4877 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af
FB
4878 gen_op(s, op, ot, opreg);
4879 break;
4880 case 1: /* OP Gv, Ev */
0af10c86 4881 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4882 mod = (modrm >> 6) & 3;
14ce26e7
FB
4883 reg = ((modrm >> 3) & 7) | rex_r;
4884 rm = (modrm & 7) | REX_B(s);
2c0262af 4885 if (mod != 3) {
0af10c86 4886 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 4887 gen_op_ld_T1_A0(ot + s->mem_index);
2c0262af
FB
4888 } else if (op == OP_XORL && rm == reg) {
4889 goto xor_zero;
4890 } else {
57fec1fe 4891 gen_op_mov_TN_reg(ot, 1, rm);
2c0262af
FB
4892 }
4893 gen_op(s, op, ot, reg);
4894 break;
4895 case 2: /* OP A, Iv */
0af10c86 4896 val = insn_get(env, s, ot);
2c0262af
FB
4897 gen_op_movl_T1_im(val);
4898 gen_op(s, op, ot, OR_EAX);
4899 break;
4900 }
4901 }
4902 break;
4903
ec9d6075
FB
4904 case 0x82:
4905 if (CODE64(s))
4906 goto illegal_op;
2c0262af
FB
4907 case 0x80: /* GRP1 */
4908 case 0x81:
4909 case 0x83:
4910 {
4911 int val;
4912
4913 if ((b & 1) == 0)
4914 ot = OT_BYTE;
4915 else
14ce26e7 4916 ot = dflag + OT_WORD;
3b46e624 4917
0af10c86 4918 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4919 mod = (modrm >> 6) & 3;
14ce26e7 4920 rm = (modrm & 7) | REX_B(s);
2c0262af 4921 op = (modrm >> 3) & 7;
3b46e624 4922
2c0262af 4923 if (mod != 3) {
14ce26e7
FB
4924 if (b == 0x83)
4925 s->rip_offset = 1;
4926 else
4927 s->rip_offset = insn_const_size(ot);
0af10c86 4928 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
4929 opreg = OR_TMP0;
4930 } else {
14ce26e7 4931 opreg = rm;
2c0262af
FB
4932 }
4933
4934 switch(b) {
4935 default:
4936 case 0x80:
4937 case 0x81:
d64477af 4938 case 0x82:
0af10c86 4939 val = insn_get(env, s, ot);
2c0262af
FB
4940 break;
4941 case 0x83:
0af10c86 4942 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
4943 break;
4944 }
4945 gen_op_movl_T1_im(val);
4946 gen_op(s, op, ot, opreg);
4947 }
4948 break;
4949
4950 /**************************/
4951 /* inc, dec, and other misc arith */
4952 case 0x40 ... 0x47: /* inc Gv */
4953 ot = dflag ? OT_LONG : OT_WORD;
4954 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4955 break;
4956 case 0x48 ... 0x4f: /* dec Gv */
4957 ot = dflag ? OT_LONG : OT_WORD;
4958 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4959 break;
4960 case 0xf6: /* GRP3 */
4961 case 0xf7:
4962 if ((b & 1) == 0)
4963 ot = OT_BYTE;
4964 else
14ce26e7 4965 ot = dflag + OT_WORD;
2c0262af 4966
0af10c86 4967 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4968 mod = (modrm >> 6) & 3;
14ce26e7 4969 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
4970 op = (modrm >> 3) & 7;
4971 if (mod != 3) {
14ce26e7
FB
4972 if (op == 0)
4973 s->rip_offset = insn_const_size(ot);
0af10c86 4974 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 4975 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 4976 } else {
57fec1fe 4977 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
4978 }
4979
4980 switch(op) {
4981 case 0: /* test */
0af10c86 4982 val = insn_get(env, s, ot);
2c0262af
FB
4983 gen_op_movl_T1_im(val);
4984 gen_op_testl_T0_T1_cc();
3ca51d07 4985 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af
FB
4986 break;
4987 case 2: /* not */
b6abf97d 4988 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
2c0262af 4989 if (mod != 3) {
57fec1fe 4990 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 4991 } else {
57fec1fe 4992 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
4993 }
4994 break;
4995 case 3: /* neg */
b6abf97d 4996 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
2c0262af 4997 if (mod != 3) {
57fec1fe 4998 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 4999 } else {
57fec1fe 5000 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
5001 }
5002 gen_op_update_neg_cc();
3ca51d07 5003 set_cc_op(s, CC_OP_SUBB + ot);
2c0262af
FB
5004 break;
5005 case 4: /* mul */
5006 switch(ot) {
5007 case OT_BYTE:
0211e5af
FB
5008 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5009 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5010 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
5011 /* XXX: use 32 bit mul which could be faster */
5012 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5013 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5014 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5015 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3ca51d07 5016 set_cc_op(s, CC_OP_MULB);
2c0262af
FB
5017 break;
5018 case OT_WORD:
0211e5af
FB
5019 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5020 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5021 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
5022 /* XXX: use 32 bit mul which could be faster */
5023 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5024 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5025 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5026 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5027 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5028 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3ca51d07 5029 set_cc_op(s, CC_OP_MULW);
2c0262af
FB
5030 break;
5031 default:
5032 case OT_LONG:
a4bcea3d
RH
5033 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5034 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
5035 tcg_gen_mulu2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5036 cpu_tmp2_i32, cpu_tmp3_i32);
5037 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], cpu_tmp2_i32);
5038 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
5039 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5040 tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5041 set_cc_op(s, CC_OP_MULL);
2c0262af 5042 break;
14ce26e7
FB
5043#ifdef TARGET_X86_64
5044 case OT_QUAD:
a4bcea3d
RH
5045 tcg_gen_mulu2_i64(cpu_regs[R_EAX], cpu_regs[R_EDX],
5046 cpu_T[0], cpu_regs[R_EAX]);
5047 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5048 tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5049 set_cc_op(s, CC_OP_MULQ);
14ce26e7
FB
5050 break;
5051#endif
2c0262af 5052 }
2c0262af
FB
5053 break;
5054 case 5: /* imul */
5055 switch(ot) {
5056 case OT_BYTE:
0211e5af
FB
5057 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5058 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5059 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5060 /* XXX: use 32 bit mul which could be faster */
5061 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5062 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5063 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5064 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5065 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3ca51d07 5066 set_cc_op(s, CC_OP_MULB);
2c0262af
FB
5067 break;
5068 case OT_WORD:
0211e5af
FB
5069 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5070 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5071 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5072 /* XXX: use 32 bit mul which could be faster */
5073 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5074 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5075 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5076 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5077 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5078 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5079 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3ca51d07 5080 set_cc_op(s, CC_OP_MULW);
2c0262af
FB
5081 break;
5082 default:
5083 case OT_LONG:
a4bcea3d
RH
5084 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5085 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
5086 tcg_gen_muls2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5087 cpu_tmp2_i32, cpu_tmp3_i32);
5088 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], cpu_tmp2_i32);
5089 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
5090 tcg_gen_sari_i32(cpu_tmp2_i32, cpu_tmp2_i32, 31);
5091 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5092 tcg_gen_sub_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
5093 tcg_gen_extu_i32_tl(cpu_cc_src, cpu_tmp2_i32);
3ca51d07 5094 set_cc_op(s, CC_OP_MULL);
2c0262af 5095 break;
14ce26e7
FB
5096#ifdef TARGET_X86_64
5097 case OT_QUAD:
a4bcea3d
RH
5098 tcg_gen_muls2_i64(cpu_regs[R_EAX], cpu_regs[R_EDX],
5099 cpu_T[0], cpu_regs[R_EAX]);
5100 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5101 tcg_gen_sari_tl(cpu_cc_src, cpu_regs[R_EAX], 63);
5102 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5103 set_cc_op(s, CC_OP_MULQ);
14ce26e7
FB
5104 break;
5105#endif
2c0262af 5106 }
2c0262af
FB
5107 break;
5108 case 6: /* div */
5109 switch(ot) {
5110 case OT_BYTE:
14ce26e7 5111 gen_jmp_im(pc_start - s->cs_base);
7923057b 5112 gen_helper_divb_AL(cpu_env, cpu_T[0]);
2c0262af
FB
5113 break;
5114 case OT_WORD:
14ce26e7 5115 gen_jmp_im(pc_start - s->cs_base);
7923057b 5116 gen_helper_divw_AX(cpu_env, cpu_T[0]);
2c0262af
FB
5117 break;
5118 default:
5119 case OT_LONG:
14ce26e7 5120 gen_jmp_im(pc_start - s->cs_base);
7923057b 5121 gen_helper_divl_EAX(cpu_env, cpu_T[0]);
14ce26e7
FB
5122 break;
5123#ifdef TARGET_X86_64
5124 case OT_QUAD:
5125 gen_jmp_im(pc_start - s->cs_base);
7923057b 5126 gen_helper_divq_EAX(cpu_env, cpu_T[0]);
2c0262af 5127 break;
14ce26e7 5128#endif
2c0262af
FB
5129 }
5130 break;
5131 case 7: /* idiv */
5132 switch(ot) {
5133 case OT_BYTE:
14ce26e7 5134 gen_jmp_im(pc_start - s->cs_base);
7923057b 5135 gen_helper_idivb_AL(cpu_env, cpu_T[0]);
2c0262af
FB
5136 break;
5137 case OT_WORD:
14ce26e7 5138 gen_jmp_im(pc_start - s->cs_base);
7923057b 5139 gen_helper_idivw_AX(cpu_env, cpu_T[0]);
2c0262af
FB
5140 break;
5141 default:
5142 case OT_LONG:
14ce26e7 5143 gen_jmp_im(pc_start - s->cs_base);
7923057b 5144 gen_helper_idivl_EAX(cpu_env, cpu_T[0]);
14ce26e7
FB
5145 break;
5146#ifdef TARGET_X86_64
5147 case OT_QUAD:
5148 gen_jmp_im(pc_start - s->cs_base);
7923057b 5149 gen_helper_idivq_EAX(cpu_env, cpu_T[0]);
2c0262af 5150 break;
14ce26e7 5151#endif
2c0262af
FB
5152 }
5153 break;
5154 default:
5155 goto illegal_op;
5156 }
5157 break;
5158
5159 case 0xfe: /* GRP4 */
5160 case 0xff: /* GRP5 */
5161 if ((b & 1) == 0)
5162 ot = OT_BYTE;
5163 else
14ce26e7 5164 ot = dflag + OT_WORD;
2c0262af 5165
0af10c86 5166 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5167 mod = (modrm >> 6) & 3;
14ce26e7 5168 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
5169 op = (modrm >> 3) & 7;
5170 if (op >= 2 && b == 0xfe) {
5171 goto illegal_op;
5172 }
14ce26e7 5173 if (CODE64(s)) {
aba9d61e 5174 if (op == 2 || op == 4) {
14ce26e7
FB
5175 /* operand size for jumps is 64 bit */
5176 ot = OT_QUAD;
aba9d61e 5177 } else if (op == 3 || op == 5) {
41b1e61f 5178 ot = dflag ? OT_LONG + (rex_w == 1) : OT_WORD;
14ce26e7
FB
5179 } else if (op == 6) {
5180 /* default push size is 64 bit */
5181 ot = dflag ? OT_QUAD : OT_WORD;
5182 }
5183 }
2c0262af 5184 if (mod != 3) {
0af10c86 5185 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5186 if (op >= 2 && op != 3 && op != 5)
57fec1fe 5187 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 5188 } else {
57fec1fe 5189 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
5190 }
5191
5192 switch(op) {
5193 case 0: /* inc Ev */
5194 if (mod != 3)
5195 opreg = OR_TMP0;
5196 else
5197 opreg = rm;
5198 gen_inc(s, ot, opreg, 1);
5199 break;
5200 case 1: /* dec Ev */
5201 if (mod != 3)
5202 opreg = OR_TMP0;
5203 else
5204 opreg = rm;
5205 gen_inc(s, ot, opreg, -1);
5206 break;
5207 case 2: /* call Ev */
4f31916f 5208 /* XXX: optimize if memory (no 'and' is necessary) */
2c0262af
FB
5209 if (s->dflag == 0)
5210 gen_op_andl_T0_ffff();
2c0262af 5211 next_eip = s->pc - s->cs_base;
1ef38687 5212 gen_movtl_T1_im(next_eip);
4f31916f
FB
5213 gen_push_T1(s);
5214 gen_op_jmp_T0();
2c0262af
FB
5215 gen_eob(s);
5216 break;
61382a50 5217 case 3: /* lcall Ev */
57fec1fe 5218 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5219 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
57fec1fe 5220 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5221 do_lcall:
5222 if (s->pe && !s->vm86) {
773cdfcc 5223 gen_update_cc_op(s);
14ce26e7 5224 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 5225 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2
BS
5226 gen_helper_lcall_protected(cpu_env, cpu_tmp2_i32, cpu_T[1],
5227 tcg_const_i32(dflag),
a7812ae4 5228 tcg_const_i32(s->pc - pc_start));
2c0262af 5229 } else {
b6abf97d 5230 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2
BS
5231 gen_helper_lcall_real(cpu_env, cpu_tmp2_i32, cpu_T[1],
5232 tcg_const_i32(dflag),
a7812ae4 5233 tcg_const_i32(s->pc - s->cs_base));
2c0262af
FB
5234 }
5235 gen_eob(s);
5236 break;
5237 case 4: /* jmp Ev */
5238 if (s->dflag == 0)
5239 gen_op_andl_T0_ffff();
5240 gen_op_jmp_T0();
5241 gen_eob(s);
5242 break;
5243 case 5: /* ljmp Ev */
57fec1fe 5244 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5245 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
57fec1fe 5246 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5247 do_ljmp:
5248 if (s->pe && !s->vm86) {
773cdfcc 5249 gen_update_cc_op(s);
14ce26e7 5250 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 5251 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 5252 gen_helper_ljmp_protected(cpu_env, cpu_tmp2_i32, cpu_T[1],
a7812ae4 5253 tcg_const_i32(s->pc - pc_start));
2c0262af 5254 } else {
3bd7da9e 5255 gen_op_movl_seg_T0_vm(R_CS);
2c0262af
FB
5256 gen_op_movl_T0_T1();
5257 gen_op_jmp_T0();
5258 }
5259 gen_eob(s);
5260 break;
5261 case 6: /* push Ev */
5262 gen_push_T0(s);
5263 break;
5264 default:
5265 goto illegal_op;
5266 }
5267 break;
5268
5269 case 0x84: /* test Ev, Gv */
5fafdf24 5270 case 0x85:
2c0262af
FB
5271 if ((b & 1) == 0)
5272 ot = OT_BYTE;
5273 else
14ce26e7 5274 ot = dflag + OT_WORD;
2c0262af 5275
0af10c86 5276 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5277 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5278
0af10c86 5279 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
57fec1fe 5280 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af 5281 gen_op_testl_T0_T1_cc();
3ca51d07 5282 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af 5283 break;
3b46e624 5284
2c0262af
FB
5285 case 0xa8: /* test eAX, Iv */
5286 case 0xa9:
5287 if ((b & 1) == 0)
5288 ot = OT_BYTE;
5289 else
14ce26e7 5290 ot = dflag + OT_WORD;
0af10c86 5291 val = insn_get(env, s, ot);
2c0262af 5292
57fec1fe 5293 gen_op_mov_TN_reg(ot, 0, OR_EAX);
2c0262af
FB
5294 gen_op_movl_T1_im(val);
5295 gen_op_testl_T0_T1_cc();
3ca51d07 5296 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af 5297 break;
3b46e624 5298
2c0262af 5299 case 0x98: /* CWDE/CBW */
14ce26e7
FB
5300#ifdef TARGET_X86_64
5301 if (dflag == 2) {
e108dd01
FB
5302 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5303 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5304 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
14ce26e7
FB
5305 } else
5306#endif
e108dd01
FB
5307 if (dflag == 1) {
5308 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5309 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5310 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5311 } else {
5312 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5313 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5314 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5315 }
2c0262af
FB
5316 break;
5317 case 0x99: /* CDQ/CWD */
14ce26e7
FB
5318#ifdef TARGET_X86_64
5319 if (dflag == 2) {
e108dd01
FB
5320 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5321 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5322 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
14ce26e7
FB
5323 } else
5324#endif
e108dd01
FB
5325 if (dflag == 1) {
5326 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5327 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5328 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5329 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5330 } else {
5331 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5332 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5333 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5334 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5335 }
2c0262af
FB
5336 break;
5337 case 0x1af: /* imul Gv, Ev */
5338 case 0x69: /* imul Gv, Ev, I */
5339 case 0x6b:
14ce26e7 5340 ot = dflag + OT_WORD;
0af10c86 5341 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
5342 reg = ((modrm >> 3) & 7) | rex_r;
5343 if (b == 0x69)
5344 s->rip_offset = insn_const_size(ot);
5345 else if (b == 0x6b)
5346 s->rip_offset = 1;
0af10c86 5347 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
2c0262af 5348 if (b == 0x69) {
0af10c86 5349 val = insn_get(env, s, ot);
2c0262af
FB
5350 gen_op_movl_T1_im(val);
5351 } else if (b == 0x6b) {
0af10c86 5352 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
5353 gen_op_movl_T1_im(val);
5354 } else {
57fec1fe 5355 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af 5356 }
a4bcea3d 5357 switch (ot) {
0211e5af 5358#ifdef TARGET_X86_64
a4bcea3d
RH
5359 case OT_QUAD:
5360 tcg_gen_muls2_i64(cpu_regs[reg], cpu_T[1], cpu_T[0], cpu_T[1]);
5361 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
5362 tcg_gen_sari_tl(cpu_cc_src, cpu_cc_dst, 63);
5363 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, cpu_T[1]);
5364 break;
0211e5af 5365#endif
a4bcea3d
RH
5366 case OT_LONG:
5367 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5368 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5369 tcg_gen_muls2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5370 cpu_tmp2_i32, cpu_tmp3_i32);
5371 tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp2_i32);
5372 tcg_gen_sari_i32(cpu_tmp2_i32, cpu_tmp2_i32, 31);
5373 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
5374 tcg_gen_sub_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
5375 tcg_gen_extu_i32_tl(cpu_cc_src, cpu_tmp2_i32);
5376 break;
5377 default:
0211e5af
FB
5378 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5379 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5380 /* XXX: use 32 bit mul which could be faster */
5381 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5382 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5383 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5384 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
a4bcea3d
RH
5385 gen_op_mov_reg_T0(ot, reg);
5386 break;
2c0262af 5387 }
3ca51d07 5388 set_cc_op(s, CC_OP_MULB + ot);
2c0262af
FB
5389 break;
5390 case 0x1c0:
5391 case 0x1c1: /* xadd Ev, Gv */
5392 if ((b & 1) == 0)
5393 ot = OT_BYTE;
5394 else
14ce26e7 5395 ot = dflag + OT_WORD;
0af10c86 5396 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5397 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5398 mod = (modrm >> 6) & 3;
5399 if (mod == 3) {
14ce26e7 5400 rm = (modrm & 7) | REX_B(s);
57fec1fe
FB
5401 gen_op_mov_TN_reg(ot, 0, reg);
5402 gen_op_mov_TN_reg(ot, 1, rm);
2c0262af 5403 gen_op_addl_T0_T1();
57fec1fe
FB
5404 gen_op_mov_reg_T1(ot, reg);
5405 gen_op_mov_reg_T0(ot, rm);
2c0262af 5406 } else {
0af10c86 5407 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe
FB
5408 gen_op_mov_TN_reg(ot, 0, reg);
5409 gen_op_ld_T1_A0(ot + s->mem_index);
2c0262af 5410 gen_op_addl_T0_T1();
57fec1fe
FB
5411 gen_op_st_T0_A0(ot + s->mem_index);
5412 gen_op_mov_reg_T1(ot, reg);
2c0262af
FB
5413 }
5414 gen_op_update2_cc();
3ca51d07 5415 set_cc_op(s, CC_OP_ADDB + ot);
2c0262af
FB
5416 break;
5417 case 0x1b0:
5418 case 0x1b1: /* cmpxchg Ev, Gv */
cad3a37d 5419 {
1130328e 5420 int label1, label2;
1e4840bf 5421 TCGv t0, t1, t2, a0;
cad3a37d
FB
5422
5423 if ((b & 1) == 0)
5424 ot = OT_BYTE;
5425 else
5426 ot = dflag + OT_WORD;
0af10c86 5427 modrm = cpu_ldub_code(env, s->pc++);
cad3a37d
FB
5428 reg = ((modrm >> 3) & 7) | rex_r;
5429 mod = (modrm >> 6) & 3;
a7812ae4
PB
5430 t0 = tcg_temp_local_new();
5431 t1 = tcg_temp_local_new();
5432 t2 = tcg_temp_local_new();
5433 a0 = tcg_temp_local_new();
1e4840bf 5434 gen_op_mov_v_reg(ot, t1, reg);
cad3a37d
FB
5435 if (mod == 3) {
5436 rm = (modrm & 7) | REX_B(s);
1e4840bf 5437 gen_op_mov_v_reg(ot, t0, rm);
cad3a37d 5438 } else {
0af10c86 5439 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
1e4840bf
FB
5440 tcg_gen_mov_tl(a0, cpu_A0);
5441 gen_op_ld_v(ot + s->mem_index, t0, a0);
cad3a37d
FB
5442 rm = 0; /* avoid warning */
5443 }
5444 label1 = gen_new_label();
a3251186
RH
5445 tcg_gen_mov_tl(t2, cpu_regs[R_EAX]);
5446 gen_extu(ot, t0);
1e4840bf 5447 gen_extu(ot, t2);
a3251186 5448 tcg_gen_brcond_tl(TCG_COND_EQ, t2, t0, label1);
f7e80adf 5449 label2 = gen_new_label();
cad3a37d 5450 if (mod == 3) {
1e4840bf 5451 gen_op_mov_reg_v(ot, R_EAX, t0);
1130328e
FB
5452 tcg_gen_br(label2);
5453 gen_set_label(label1);
1e4840bf 5454 gen_op_mov_reg_v(ot, rm, t1);
cad3a37d 5455 } else {
f7e80adf
AG
5456 /* perform no-op store cycle like physical cpu; must be
5457 before changing accumulator to ensure idempotency if
5458 the store faults and the instruction is restarted */
5459 gen_op_st_v(ot + s->mem_index, t0, a0);
1e4840bf 5460 gen_op_mov_reg_v(ot, R_EAX, t0);
f7e80adf 5461 tcg_gen_br(label2);
1130328e 5462 gen_set_label(label1);
1e4840bf 5463 gen_op_st_v(ot + s->mem_index, t1, a0);
cad3a37d 5464 }
f7e80adf 5465 gen_set_label(label2);
1e4840bf 5466 tcg_gen_mov_tl(cpu_cc_src, t0);
a3251186
RH
5467 tcg_gen_mov_tl(cpu_cc_srcT, t2);
5468 tcg_gen_sub_tl(cpu_cc_dst, t2, t0);
3ca51d07 5469 set_cc_op(s, CC_OP_SUBB + ot);
1e4840bf
FB
5470 tcg_temp_free(t0);
5471 tcg_temp_free(t1);
5472 tcg_temp_free(t2);
5473 tcg_temp_free(a0);
2c0262af 5474 }
2c0262af
FB
5475 break;
5476 case 0x1c7: /* cmpxchg8b */
0af10c86 5477 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5478 mod = (modrm >> 6) & 3;
71c3558e 5479 if ((mod == 3) || ((modrm & 0x38) != 0x8))
2c0262af 5480 goto illegal_op;
1b9d9ebb
FB
5481#ifdef TARGET_X86_64
5482 if (dflag == 2) {
5483 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5484 goto illegal_op;
5485 gen_jmp_im(pc_start - s->cs_base);
773cdfcc 5486 gen_update_cc_op(s);
0af10c86 5487 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
92fc4b58 5488 gen_helper_cmpxchg16b(cpu_env, cpu_A0);
1b9d9ebb
FB
5489 } else
5490#endif
5491 {
5492 if (!(s->cpuid_features & CPUID_CX8))
5493 goto illegal_op;
5494 gen_jmp_im(pc_start - s->cs_base);
773cdfcc 5495 gen_update_cc_op(s);
0af10c86 5496 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
92fc4b58 5497 gen_helper_cmpxchg8b(cpu_env, cpu_A0);
1b9d9ebb 5498 }
3ca51d07 5499 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 5500 break;
3b46e624 5501
2c0262af
FB
5502 /**************************/
5503 /* push/pop */
5504 case 0x50 ... 0x57: /* push */
57fec1fe 5505 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
2c0262af
FB
5506 gen_push_T0(s);
5507 break;
5508 case 0x58 ... 0x5f: /* pop */
14ce26e7
FB
5509 if (CODE64(s)) {
5510 ot = dflag ? OT_QUAD : OT_WORD;
5511 } else {
5512 ot = dflag + OT_WORD;
5513 }
2c0262af 5514 gen_pop_T0(s);
77729c24 5515 /* NOTE: order is important for pop %sp */
2c0262af 5516 gen_pop_update(s);
57fec1fe 5517 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
2c0262af
FB
5518 break;
5519 case 0x60: /* pusha */
14ce26e7
FB
5520 if (CODE64(s))
5521 goto illegal_op;
2c0262af
FB
5522 gen_pusha(s);
5523 break;
5524 case 0x61: /* popa */
14ce26e7
FB
5525 if (CODE64(s))
5526 goto illegal_op;
2c0262af
FB
5527 gen_popa(s);
5528 break;
5529 case 0x68: /* push Iv */
5530 case 0x6a:
14ce26e7
FB
5531 if (CODE64(s)) {
5532 ot = dflag ? OT_QUAD : OT_WORD;
5533 } else {
5534 ot = dflag + OT_WORD;
5535 }
2c0262af 5536 if (b == 0x68)
0af10c86 5537 val = insn_get(env, s, ot);
2c0262af 5538 else
0af10c86 5539 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
5540 gen_op_movl_T0_im(val);
5541 gen_push_T0(s);
5542 break;
5543 case 0x8f: /* pop Ev */
14ce26e7
FB
5544 if (CODE64(s)) {
5545 ot = dflag ? OT_QUAD : OT_WORD;
5546 } else {
5547 ot = dflag + OT_WORD;
5548 }
0af10c86 5549 modrm = cpu_ldub_code(env, s->pc++);
77729c24 5550 mod = (modrm >> 6) & 3;
2c0262af 5551 gen_pop_T0(s);
77729c24
FB
5552 if (mod == 3) {
5553 /* NOTE: order is important for pop %sp */
5554 gen_pop_update(s);
14ce26e7 5555 rm = (modrm & 7) | REX_B(s);
57fec1fe 5556 gen_op_mov_reg_T0(ot, rm);
77729c24
FB
5557 } else {
5558 /* NOTE: order is important too for MMU exceptions */
14ce26e7 5559 s->popl_esp_hack = 1 << ot;
0af10c86 5560 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
77729c24
FB
5561 s->popl_esp_hack = 0;
5562 gen_pop_update(s);
5563 }
2c0262af
FB
5564 break;
5565 case 0xc8: /* enter */
5566 {
5567 int level;
0af10c86 5568 val = cpu_lduw_code(env, s->pc);
2c0262af 5569 s->pc += 2;
0af10c86 5570 level = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5571 gen_enter(s, val, level);
5572 }
5573 break;
5574 case 0xc9: /* leave */
5575 /* XXX: exception not precise (ESP is updated before potential exception) */
14ce26e7 5576 if (CODE64(s)) {
57fec1fe
FB
5577 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5578 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
14ce26e7 5579 } else if (s->ss32) {
57fec1fe
FB
5580 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5581 gen_op_mov_reg_T0(OT_LONG, R_ESP);
2c0262af 5582 } else {
57fec1fe
FB
5583 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5584 gen_op_mov_reg_T0(OT_WORD, R_ESP);
2c0262af
FB
5585 }
5586 gen_pop_T0(s);
14ce26e7
FB
5587 if (CODE64(s)) {
5588 ot = dflag ? OT_QUAD : OT_WORD;
5589 } else {
5590 ot = dflag + OT_WORD;
5591 }
57fec1fe 5592 gen_op_mov_reg_T0(ot, R_EBP);
2c0262af
FB
5593 gen_pop_update(s);
5594 break;
5595 case 0x06: /* push es */
5596 case 0x0e: /* push cs */
5597 case 0x16: /* push ss */
5598 case 0x1e: /* push ds */
14ce26e7
FB
5599 if (CODE64(s))
5600 goto illegal_op;
2c0262af
FB
5601 gen_op_movl_T0_seg(b >> 3);
5602 gen_push_T0(s);
5603 break;
5604 case 0x1a0: /* push fs */
5605 case 0x1a8: /* push gs */
5606 gen_op_movl_T0_seg((b >> 3) & 7);
5607 gen_push_T0(s);
5608 break;
5609 case 0x07: /* pop es */
5610 case 0x17: /* pop ss */
5611 case 0x1f: /* pop ds */
14ce26e7
FB
5612 if (CODE64(s))
5613 goto illegal_op;
2c0262af
FB
5614 reg = b >> 3;
5615 gen_pop_T0(s);
5616 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5617 gen_pop_update(s);
5618 if (reg == R_SS) {
a2cc3b24
FB
5619 /* if reg == SS, inhibit interrupts/trace. */
5620 /* If several instructions disable interrupts, only the
5621 _first_ does it */
5622 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 5623 gen_helper_set_inhibit_irq(cpu_env);
2c0262af
FB
5624 s->tf = 0;
5625 }
5626 if (s->is_jmp) {
14ce26e7 5627 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5628 gen_eob(s);
5629 }
5630 break;
5631 case 0x1a1: /* pop fs */
5632 case 0x1a9: /* pop gs */
5633 gen_pop_T0(s);
5634 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5635 gen_pop_update(s);
5636 if (s->is_jmp) {
14ce26e7 5637 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5638 gen_eob(s);
5639 }
5640 break;
5641
5642 /**************************/
5643 /* mov */
5644 case 0x88:
5645 case 0x89: /* mov Gv, Ev */
5646 if ((b & 1) == 0)
5647 ot = OT_BYTE;
5648 else
14ce26e7 5649 ot = dflag + OT_WORD;
0af10c86 5650 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5651 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5652
2c0262af 5653 /* generate a generic store */
0af10c86 5654 gen_ldst_modrm(env, s, modrm, ot, reg, 1);
2c0262af
FB
5655 break;
5656 case 0xc6:
5657 case 0xc7: /* mov Ev, Iv */
5658 if ((b & 1) == 0)
5659 ot = OT_BYTE;
5660 else
14ce26e7 5661 ot = dflag + OT_WORD;
0af10c86 5662 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5663 mod = (modrm >> 6) & 3;
14ce26e7
FB
5664 if (mod != 3) {
5665 s->rip_offset = insn_const_size(ot);
0af10c86 5666 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 5667 }
0af10c86 5668 val = insn_get(env, s, ot);
2c0262af
FB
5669 gen_op_movl_T0_im(val);
5670 if (mod != 3)
57fec1fe 5671 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5672 else
57fec1fe 5673 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
2c0262af
FB
5674 break;
5675 case 0x8a:
5676 case 0x8b: /* mov Ev, Gv */
5677 if ((b & 1) == 0)
5678 ot = OT_BYTE;
5679 else
14ce26e7 5680 ot = OT_WORD + dflag;
0af10c86 5681 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5682 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5683
0af10c86 5684 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
57fec1fe 5685 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
5686 break;
5687 case 0x8e: /* mov seg, Gv */
0af10c86 5688 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5689 reg = (modrm >> 3) & 7;
5690 if (reg >= 6 || reg == R_CS)
5691 goto illegal_op;
0af10c86 5692 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
2c0262af
FB
5693 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5694 if (reg == R_SS) {
5695 /* if reg == SS, inhibit interrupts/trace */
a2cc3b24
FB
5696 /* If several instructions disable interrupts, only the
5697 _first_ does it */
5698 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 5699 gen_helper_set_inhibit_irq(cpu_env);
2c0262af
FB
5700 s->tf = 0;
5701 }
5702 if (s->is_jmp) {
14ce26e7 5703 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5704 gen_eob(s);
5705 }
5706 break;
5707 case 0x8c: /* mov Gv, seg */
0af10c86 5708 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5709 reg = (modrm >> 3) & 7;
5710 mod = (modrm >> 6) & 3;
5711 if (reg >= 6)
5712 goto illegal_op;
5713 gen_op_movl_T0_seg(reg);
14ce26e7
FB
5714 if (mod == 3)
5715 ot = OT_WORD + dflag;
5716 else
5717 ot = OT_WORD;
0af10c86 5718 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
5719 break;
5720
5721 case 0x1b6: /* movzbS Gv, Eb */
5722 case 0x1b7: /* movzwS Gv, Eb */
5723 case 0x1be: /* movsbS Gv, Eb */
5724 case 0x1bf: /* movswS Gv, Eb */
5725 {
5726 int d_ot;
5727 /* d_ot is the size of destination */
5728 d_ot = dflag + OT_WORD;
5729 /* ot is the size of source */
5730 ot = (b & 1) + OT_BYTE;
0af10c86 5731 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5732 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 5733 mod = (modrm >> 6) & 3;
14ce26e7 5734 rm = (modrm & 7) | REX_B(s);
3b46e624 5735
2c0262af 5736 if (mod == 3) {
57fec1fe 5737 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
5738 switch(ot | (b & 8)) {
5739 case OT_BYTE:
e108dd01 5740 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5741 break;
5742 case OT_BYTE | 8:
e108dd01 5743 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5744 break;
5745 case OT_WORD:
e108dd01 5746 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5747 break;
5748 default:
5749 case OT_WORD | 8:
e108dd01 5750 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5751 break;
5752 }
57fec1fe 5753 gen_op_mov_reg_T0(d_ot, reg);
2c0262af 5754 } else {
0af10c86 5755 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5756 if (b & 8) {
57fec1fe 5757 gen_op_lds_T0_A0(ot + s->mem_index);
2c0262af 5758 } else {
57fec1fe 5759 gen_op_ldu_T0_A0(ot + s->mem_index);
2c0262af 5760 }
57fec1fe 5761 gen_op_mov_reg_T0(d_ot, reg);
2c0262af
FB
5762 }
5763 }
5764 break;
5765
5766 case 0x8d: /* lea */
14ce26e7 5767 ot = dflag + OT_WORD;
0af10c86 5768 modrm = cpu_ldub_code(env, s->pc++);
3a1d9b8b
FB
5769 mod = (modrm >> 6) & 3;
5770 if (mod == 3)
5771 goto illegal_op;
14ce26e7 5772 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5773 /* we must ensure that no segment is added */
5774 s->override = -1;
5775 val = s->addseg;
5776 s->addseg = 0;
0af10c86 5777 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5778 s->addseg = val;
57fec1fe 5779 gen_op_mov_reg_A0(ot - OT_WORD, reg);
2c0262af 5780 break;
3b46e624 5781
2c0262af
FB
5782 case 0xa0: /* mov EAX, Ov */
5783 case 0xa1:
5784 case 0xa2: /* mov Ov, EAX */
5785 case 0xa3:
2c0262af 5786 {
14ce26e7
FB
5787 target_ulong offset_addr;
5788
5789 if ((b & 1) == 0)
5790 ot = OT_BYTE;
5791 else
5792 ot = dflag + OT_WORD;
5793#ifdef TARGET_X86_64
8f091a59 5794 if (s->aflag == 2) {
0af10c86 5795 offset_addr = cpu_ldq_code(env, s->pc);
14ce26e7 5796 s->pc += 8;
57fec1fe 5797 gen_op_movq_A0_im(offset_addr);
5fafdf24 5798 } else
14ce26e7
FB
5799#endif
5800 {
5801 if (s->aflag) {
0af10c86 5802 offset_addr = insn_get(env, s, OT_LONG);
14ce26e7 5803 } else {
0af10c86 5804 offset_addr = insn_get(env, s, OT_WORD);
14ce26e7
FB
5805 }
5806 gen_op_movl_A0_im(offset_addr);
5807 }
664e0f19 5808 gen_add_A0_ds_seg(s);
14ce26e7 5809 if ((b & 2) == 0) {
57fec1fe
FB
5810 gen_op_ld_T0_A0(ot + s->mem_index);
5811 gen_op_mov_reg_T0(ot, R_EAX);
14ce26e7 5812 } else {
57fec1fe
FB
5813 gen_op_mov_TN_reg(ot, 0, R_EAX);
5814 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af
FB
5815 }
5816 }
2c0262af
FB
5817 break;
5818 case 0xd7: /* xlat */
14ce26e7 5819#ifdef TARGET_X86_64
8f091a59 5820 if (s->aflag == 2) {
57fec1fe 5821 gen_op_movq_A0_reg(R_EBX);
bbf662ee
FB
5822 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5823 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5824 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5fafdf24 5825 } else
14ce26e7
FB
5826#endif
5827 {
57fec1fe 5828 gen_op_movl_A0_reg(R_EBX);
bbf662ee
FB
5829 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5830 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5831 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
14ce26e7
FB
5832 if (s->aflag == 0)
5833 gen_op_andl_A0_ffff();
bbf662ee
FB
5834 else
5835 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
14ce26e7 5836 }
664e0f19 5837 gen_add_A0_ds_seg(s);
57fec1fe
FB
5838 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5839 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
2c0262af
FB
5840 break;
5841 case 0xb0 ... 0xb7: /* mov R, Ib */
0af10c86 5842 val = insn_get(env, s, OT_BYTE);
2c0262af 5843 gen_op_movl_T0_im(val);
57fec1fe 5844 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
2c0262af
FB
5845 break;
5846 case 0xb8 ... 0xbf: /* mov R, Iv */
14ce26e7
FB
5847#ifdef TARGET_X86_64
5848 if (dflag == 2) {
5849 uint64_t tmp;
5850 /* 64 bit case */
0af10c86 5851 tmp = cpu_ldq_code(env, s->pc);
14ce26e7
FB
5852 s->pc += 8;
5853 reg = (b & 7) | REX_B(s);
5854 gen_movtl_T0_im(tmp);
57fec1fe 5855 gen_op_mov_reg_T0(OT_QUAD, reg);
5fafdf24 5856 } else
14ce26e7
FB
5857#endif
5858 {
5859 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 5860 val = insn_get(env, s, ot);
14ce26e7
FB
5861 reg = (b & 7) | REX_B(s);
5862 gen_op_movl_T0_im(val);
57fec1fe 5863 gen_op_mov_reg_T0(ot, reg);
14ce26e7 5864 }
2c0262af
FB
5865 break;
5866
5867 case 0x91 ... 0x97: /* xchg R, EAX */
7418027e 5868 do_xchg_reg_eax:
14ce26e7
FB
5869 ot = dflag + OT_WORD;
5870 reg = (b & 7) | REX_B(s);
2c0262af
FB
5871 rm = R_EAX;
5872 goto do_xchg_reg;
5873 case 0x86:
5874 case 0x87: /* xchg Ev, Gv */
5875 if ((b & 1) == 0)
5876 ot = OT_BYTE;
5877 else
14ce26e7 5878 ot = dflag + OT_WORD;
0af10c86 5879 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5880 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5881 mod = (modrm >> 6) & 3;
5882 if (mod == 3) {
14ce26e7 5883 rm = (modrm & 7) | REX_B(s);
2c0262af 5884 do_xchg_reg:
57fec1fe
FB
5885 gen_op_mov_TN_reg(ot, 0, reg);
5886 gen_op_mov_TN_reg(ot, 1, rm);
5887 gen_op_mov_reg_T0(ot, rm);
5888 gen_op_mov_reg_T1(ot, reg);
2c0262af 5889 } else {
0af10c86 5890 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 5891 gen_op_mov_TN_reg(ot, 0, reg);
2c0262af
FB
5892 /* for xchg, lock is implicit */
5893 if (!(prefixes & PREFIX_LOCK))
a7812ae4 5894 gen_helper_lock();
57fec1fe
FB
5895 gen_op_ld_T1_A0(ot + s->mem_index);
5896 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5897 if (!(prefixes & PREFIX_LOCK))
a7812ae4 5898 gen_helper_unlock();
57fec1fe 5899 gen_op_mov_reg_T1(ot, reg);
2c0262af
FB
5900 }
5901 break;
5902 case 0xc4: /* les Gv */
701ed211 5903 /* In CODE64 this is VEX3; see above. */
2c0262af
FB
5904 op = R_ES;
5905 goto do_lxx;
5906 case 0xc5: /* lds Gv */
701ed211 5907 /* In CODE64 this is VEX2; see above. */
2c0262af
FB
5908 op = R_DS;
5909 goto do_lxx;
5910 case 0x1b2: /* lss Gv */
5911 op = R_SS;
5912 goto do_lxx;
5913 case 0x1b4: /* lfs Gv */
5914 op = R_FS;
5915 goto do_lxx;
5916 case 0x1b5: /* lgs Gv */
5917 op = R_GS;
5918 do_lxx:
5919 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 5920 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5921 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5922 mod = (modrm >> 6) & 3;
5923 if (mod == 3)
5924 goto illegal_op;
0af10c86 5925 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 5926 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5927 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
2c0262af 5928 /* load the segment first to handle exceptions properly */
57fec1fe 5929 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5930 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5931 /* then put the data */
57fec1fe 5932 gen_op_mov_reg_T1(ot, reg);
2c0262af 5933 if (s->is_jmp) {
14ce26e7 5934 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5935 gen_eob(s);
5936 }
5937 break;
3b46e624 5938
2c0262af
FB
5939 /************************/
5940 /* shifts */
5941 case 0xc0:
5942 case 0xc1:
5943 /* shift Ev,Ib */
5944 shift = 2;
5945 grp2:
5946 {
5947 if ((b & 1) == 0)
5948 ot = OT_BYTE;
5949 else
14ce26e7 5950 ot = dflag + OT_WORD;
3b46e624 5951
0af10c86 5952 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5953 mod = (modrm >> 6) & 3;
2c0262af 5954 op = (modrm >> 3) & 7;
3b46e624 5955
2c0262af 5956 if (mod != 3) {
14ce26e7
FB
5957 if (shift == 2) {
5958 s->rip_offset = 1;
5959 }
0af10c86 5960 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
5961 opreg = OR_TMP0;
5962 } else {
14ce26e7 5963 opreg = (modrm & 7) | REX_B(s);
2c0262af
FB
5964 }
5965
5966 /* simpler op */
5967 if (shift == 0) {
5968 gen_shift(s, op, ot, opreg, OR_ECX);
5969 } else {
5970 if (shift == 2) {
0af10c86 5971 shift = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5972 }
5973 gen_shifti(s, op, ot, opreg, shift);
5974 }
5975 }
5976 break;
5977 case 0xd0:
5978 case 0xd1:
5979 /* shift Ev,1 */
5980 shift = 1;
5981 goto grp2;
5982 case 0xd2:
5983 case 0xd3:
5984 /* shift Ev,cl */
5985 shift = 0;
5986 goto grp2;
5987
5988 case 0x1a4: /* shld imm */
5989 op = 0;
5990 shift = 1;
5991 goto do_shiftd;
5992 case 0x1a5: /* shld cl */
5993 op = 0;
5994 shift = 0;
5995 goto do_shiftd;
5996 case 0x1ac: /* shrd imm */
5997 op = 1;
5998 shift = 1;
5999 goto do_shiftd;
6000 case 0x1ad: /* shrd cl */
6001 op = 1;
6002 shift = 0;
6003 do_shiftd:
14ce26e7 6004 ot = dflag + OT_WORD;
0af10c86 6005 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 6006 mod = (modrm >> 6) & 3;
14ce26e7
FB
6007 rm = (modrm & 7) | REX_B(s);
6008 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 6009 if (mod != 3) {
0af10c86 6010 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
b6abf97d 6011 opreg = OR_TMP0;
2c0262af 6012 } else {
b6abf97d 6013 opreg = rm;
2c0262af 6014 }
57fec1fe 6015 gen_op_mov_TN_reg(ot, 1, reg);
3b46e624 6016
2c0262af 6017 if (shift) {
3b9d3cf1
PB
6018 TCGv imm = tcg_const_tl(cpu_ldub_code(env, s->pc++));
6019 gen_shiftd_rm_T1(s, ot, opreg, op, imm);
6020 tcg_temp_free(imm);
2c0262af 6021 } else {
3b9d3cf1 6022 gen_shiftd_rm_T1(s, ot, opreg, op, cpu_regs[R_ECX]);
2c0262af
FB
6023 }
6024 break;
6025
6026 /************************/
6027 /* floats */
5fafdf24 6028 case 0xd8 ... 0xdf:
7eee2a50
FB
6029 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6030 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6031 /* XXX: what to do if illegal op ? */
6032 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6033 break;
6034 }
0af10c86 6035 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
6036 mod = (modrm >> 6) & 3;
6037 rm = modrm & 7;
6038 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
2c0262af
FB
6039 if (mod != 3) {
6040 /* memory op */
0af10c86 6041 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
6042 switch(op) {
6043 case 0x00 ... 0x07: /* fxxxs */
6044 case 0x10 ... 0x17: /* fixxxl */
6045 case 0x20 ... 0x27: /* fxxxl */
6046 case 0x30 ... 0x37: /* fixxx */
6047 {
6048 int op1;
6049 op1 = op & 7;
6050
6051 switch(op >> 4) {
6052 case 0:
ba7cd150 6053 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6054 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6055 gen_helper_flds_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6056 break;
6057 case 1:
ba7cd150 6058 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6059 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6060 gen_helper_fildl_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6061 break;
6062 case 2:
b6abf97d 6063 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6064 (s->mem_index >> 2) - 1);
d3eb5eae 6065 gen_helper_fldl_FT0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6066 break;
6067 case 3:
6068 default:
ba7cd150 6069 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6070 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6071 gen_helper_fildl_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6072 break;
6073 }
3b46e624 6074
a7812ae4 6075 gen_helper_fp_arith_ST0_FT0(op1);
2c0262af
FB
6076 if (op1 == 3) {
6077 /* fcomp needs pop */
d3eb5eae 6078 gen_helper_fpop(cpu_env);
2c0262af
FB
6079 }
6080 }
6081 break;
6082 case 0x08: /* flds */
6083 case 0x0a: /* fsts */
6084 case 0x0b: /* fstps */
465e9838
FB
6085 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6086 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6087 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
2c0262af
FB
6088 switch(op & 7) {
6089 case 0:
6090 switch(op >> 4) {
6091 case 0:
ba7cd150 6092 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6093 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6094 gen_helper_flds_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6095 break;
6096 case 1:
ba7cd150 6097 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6098 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6099 gen_helper_fildl_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6100 break;
6101 case 2:
b6abf97d 6102 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6103 (s->mem_index >> 2) - 1);
d3eb5eae 6104 gen_helper_fldl_ST0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6105 break;
6106 case 3:
6107 default:
ba7cd150 6108 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6109 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6110 gen_helper_fildl_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6111 break;
6112 }
6113 break;
465e9838 6114 case 1:
19e6c4b8 6115 /* XXX: the corresponding CPUID bit must be tested ! */
465e9838
FB
6116 switch(op >> 4) {
6117 case 1:
d3eb5eae 6118 gen_helper_fisttl_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6119 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6120 gen_op_st_T0_A0(OT_LONG + s->mem_index);
465e9838
FB
6121 break;
6122 case 2:
d3eb5eae 6123 gen_helper_fisttll_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6124 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6125 (s->mem_index >> 2) - 1);
465e9838
FB
6126 break;
6127 case 3:
6128 default:
d3eb5eae 6129 gen_helper_fistt_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6130 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6131 gen_op_st_T0_A0(OT_WORD + s->mem_index);
19e6c4b8 6132 break;
465e9838 6133 }
d3eb5eae 6134 gen_helper_fpop(cpu_env);
465e9838 6135 break;
2c0262af
FB
6136 default:
6137 switch(op >> 4) {
6138 case 0:
d3eb5eae 6139 gen_helper_fsts_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6140 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6141 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2c0262af
FB
6142 break;
6143 case 1:
d3eb5eae 6144 gen_helper_fistl_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6145 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6146 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2c0262af
FB
6147 break;
6148 case 2:
d3eb5eae 6149 gen_helper_fstl_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6150 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6151 (s->mem_index >> 2) - 1);
2c0262af
FB
6152 break;
6153 case 3:
6154 default:
d3eb5eae 6155 gen_helper_fist_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6156 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6157 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6158 break;
6159 }
6160 if ((op & 7) == 3)
d3eb5eae 6161 gen_helper_fpop(cpu_env);
2c0262af
FB
6162 break;
6163 }
6164 break;
6165 case 0x0c: /* fldenv mem */
773cdfcc 6166 gen_update_cc_op(s);
19e6c4b8 6167 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6168 gen_helper_fldenv(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6169 break;
6170 case 0x0d: /* fldcw mem */
19e6c4b8 6171 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6172 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6173 gen_helper_fldcw(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6174 break;
6175 case 0x0e: /* fnstenv mem */
773cdfcc 6176 gen_update_cc_op(s);
19e6c4b8 6177 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6178 gen_helper_fstenv(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6179 break;
6180 case 0x0f: /* fnstcw mem */
d3eb5eae 6181 gen_helper_fnstcw(cpu_tmp2_i32, cpu_env);
b6abf97d 6182 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6183 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6184 break;
6185 case 0x1d: /* fldt mem */
773cdfcc 6186 gen_update_cc_op(s);
19e6c4b8 6187 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6188 gen_helper_fldt_ST0(cpu_env, cpu_A0);
2c0262af
FB
6189 break;
6190 case 0x1f: /* fstpt mem */
773cdfcc 6191 gen_update_cc_op(s);
19e6c4b8 6192 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
6193 gen_helper_fstt_ST0(cpu_env, cpu_A0);
6194 gen_helper_fpop(cpu_env);
2c0262af
FB
6195 break;
6196 case 0x2c: /* frstor mem */
773cdfcc 6197 gen_update_cc_op(s);
19e6c4b8 6198 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6199 gen_helper_frstor(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6200 break;
6201 case 0x2e: /* fnsave mem */
773cdfcc 6202 gen_update_cc_op(s);
19e6c4b8 6203 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6204 gen_helper_fsave(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6205 break;
6206 case 0x2f: /* fnstsw mem */
d3eb5eae 6207 gen_helper_fnstsw(cpu_tmp2_i32, cpu_env);
b6abf97d 6208 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6209 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6210 break;
6211 case 0x3c: /* fbld */
773cdfcc 6212 gen_update_cc_op(s);
19e6c4b8 6213 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6214 gen_helper_fbld_ST0(cpu_env, cpu_A0);
2c0262af
FB
6215 break;
6216 case 0x3e: /* fbstp */
773cdfcc 6217 gen_update_cc_op(s);
19e6c4b8 6218 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
6219 gen_helper_fbst_ST0(cpu_env, cpu_A0);
6220 gen_helper_fpop(cpu_env);
2c0262af
FB
6221 break;
6222 case 0x3d: /* fildll */
b6abf97d 6223 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6224 (s->mem_index >> 2) - 1);
d3eb5eae 6225 gen_helper_fildll_ST0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6226 break;
6227 case 0x3f: /* fistpll */
d3eb5eae 6228 gen_helper_fistll_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6229 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6230 (s->mem_index >> 2) - 1);
d3eb5eae 6231 gen_helper_fpop(cpu_env);
2c0262af
FB
6232 break;
6233 default:
6234 goto illegal_op;
6235 }
6236 } else {
6237 /* register float ops */
6238 opreg = rm;
6239
6240 switch(op) {
6241 case 0x08: /* fld sti */
d3eb5eae
BS
6242 gen_helper_fpush(cpu_env);
6243 gen_helper_fmov_ST0_STN(cpu_env,
6244 tcg_const_i32((opreg + 1) & 7));
2c0262af
FB
6245 break;
6246 case 0x09: /* fxchg sti */
c169c906
FB
6247 case 0x29: /* fxchg4 sti, undocumented op */
6248 case 0x39: /* fxchg7 sti, undocumented op */
d3eb5eae 6249 gen_helper_fxchg_ST0_STN(cpu_env, tcg_const_i32(opreg));
2c0262af
FB
6250 break;
6251 case 0x0a: /* grp d9/2 */
6252 switch(rm) {
6253 case 0: /* fnop */
023fe10d 6254 /* check exceptions (FreeBSD FPU probe) */
773cdfcc 6255 gen_update_cc_op(s);
14ce26e7 6256 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6257 gen_helper_fwait(cpu_env);
2c0262af
FB
6258 break;
6259 default:
6260 goto illegal_op;
6261 }
6262 break;
6263 case 0x0c: /* grp d9/4 */
6264 switch(rm) {
6265 case 0: /* fchs */
d3eb5eae 6266 gen_helper_fchs_ST0(cpu_env);
2c0262af
FB
6267 break;
6268 case 1: /* fabs */
d3eb5eae 6269 gen_helper_fabs_ST0(cpu_env);
2c0262af
FB
6270 break;
6271 case 4: /* ftst */
d3eb5eae
BS
6272 gen_helper_fldz_FT0(cpu_env);
6273 gen_helper_fcom_ST0_FT0(cpu_env);
2c0262af
FB
6274 break;
6275 case 5: /* fxam */
d3eb5eae 6276 gen_helper_fxam_ST0(cpu_env);
2c0262af
FB
6277 break;
6278 default:
6279 goto illegal_op;
6280 }
6281 break;
6282 case 0x0d: /* grp d9/5 */
6283 {
6284 switch(rm) {
6285 case 0:
d3eb5eae
BS
6286 gen_helper_fpush(cpu_env);
6287 gen_helper_fld1_ST0(cpu_env);
2c0262af
FB
6288 break;
6289 case 1:
d3eb5eae
BS
6290 gen_helper_fpush(cpu_env);
6291 gen_helper_fldl2t_ST0(cpu_env);
2c0262af
FB
6292 break;
6293 case 2:
d3eb5eae
BS
6294 gen_helper_fpush(cpu_env);
6295 gen_helper_fldl2e_ST0(cpu_env);
2c0262af
FB
6296 break;
6297 case 3:
d3eb5eae
BS
6298 gen_helper_fpush(cpu_env);
6299 gen_helper_fldpi_ST0(cpu_env);
2c0262af
FB
6300 break;
6301 case 4:
d3eb5eae
BS
6302 gen_helper_fpush(cpu_env);
6303 gen_helper_fldlg2_ST0(cpu_env);
2c0262af
FB
6304 break;
6305 case 5:
d3eb5eae
BS
6306 gen_helper_fpush(cpu_env);
6307 gen_helper_fldln2_ST0(cpu_env);
2c0262af
FB
6308 break;
6309 case 6:
d3eb5eae
BS
6310 gen_helper_fpush(cpu_env);
6311 gen_helper_fldz_ST0(cpu_env);
2c0262af
FB
6312 break;
6313 default:
6314 goto illegal_op;
6315 }
6316 }
6317 break;
6318 case 0x0e: /* grp d9/6 */
6319 switch(rm) {
6320 case 0: /* f2xm1 */
d3eb5eae 6321 gen_helper_f2xm1(cpu_env);
2c0262af
FB
6322 break;
6323 case 1: /* fyl2x */
d3eb5eae 6324 gen_helper_fyl2x(cpu_env);
2c0262af
FB
6325 break;
6326 case 2: /* fptan */
d3eb5eae 6327 gen_helper_fptan(cpu_env);
2c0262af
FB
6328 break;
6329 case 3: /* fpatan */
d3eb5eae 6330 gen_helper_fpatan(cpu_env);
2c0262af
FB
6331 break;
6332 case 4: /* fxtract */
d3eb5eae 6333 gen_helper_fxtract(cpu_env);
2c0262af
FB
6334 break;
6335 case 5: /* fprem1 */
d3eb5eae 6336 gen_helper_fprem1(cpu_env);
2c0262af
FB
6337 break;
6338 case 6: /* fdecstp */
d3eb5eae 6339 gen_helper_fdecstp(cpu_env);
2c0262af
FB
6340 break;
6341 default:
6342 case 7: /* fincstp */
d3eb5eae 6343 gen_helper_fincstp(cpu_env);
2c0262af
FB
6344 break;
6345 }
6346 break;
6347 case 0x0f: /* grp d9/7 */
6348 switch(rm) {
6349 case 0: /* fprem */
d3eb5eae 6350 gen_helper_fprem(cpu_env);
2c0262af
FB
6351 break;
6352 case 1: /* fyl2xp1 */
d3eb5eae 6353 gen_helper_fyl2xp1(cpu_env);
2c0262af
FB
6354 break;
6355 case 2: /* fsqrt */
d3eb5eae 6356 gen_helper_fsqrt(cpu_env);
2c0262af
FB
6357 break;
6358 case 3: /* fsincos */
d3eb5eae 6359 gen_helper_fsincos(cpu_env);
2c0262af
FB
6360 break;
6361 case 5: /* fscale */
d3eb5eae 6362 gen_helper_fscale(cpu_env);
2c0262af
FB
6363 break;
6364 case 4: /* frndint */
d3eb5eae 6365 gen_helper_frndint(cpu_env);
2c0262af
FB
6366 break;
6367 case 6: /* fsin */
d3eb5eae 6368 gen_helper_fsin(cpu_env);
2c0262af
FB
6369 break;
6370 default:
6371 case 7: /* fcos */
d3eb5eae 6372 gen_helper_fcos(cpu_env);
2c0262af
FB
6373 break;
6374 }
6375 break;
6376 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6377 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6378 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6379 {
6380 int op1;
3b46e624 6381
2c0262af
FB
6382 op1 = op & 7;
6383 if (op >= 0x20) {
a7812ae4 6384 gen_helper_fp_arith_STN_ST0(op1, opreg);
2c0262af 6385 if (op >= 0x30)
d3eb5eae 6386 gen_helper_fpop(cpu_env);
2c0262af 6387 } else {
d3eb5eae 6388 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
a7812ae4 6389 gen_helper_fp_arith_ST0_FT0(op1);
2c0262af
FB
6390 }
6391 }
6392 break;
6393 case 0x02: /* fcom */
c169c906 6394 case 0x22: /* fcom2, undocumented op */
d3eb5eae
BS
6395 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6396 gen_helper_fcom_ST0_FT0(cpu_env);
2c0262af
FB
6397 break;
6398 case 0x03: /* fcomp */
c169c906
FB
6399 case 0x23: /* fcomp3, undocumented op */
6400 case 0x32: /* fcomp5, undocumented op */
d3eb5eae
BS
6401 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6402 gen_helper_fcom_ST0_FT0(cpu_env);
6403 gen_helper_fpop(cpu_env);
2c0262af
FB
6404 break;
6405 case 0x15: /* da/5 */
6406 switch(rm) {
6407 case 1: /* fucompp */
d3eb5eae
BS
6408 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(1));
6409 gen_helper_fucom_ST0_FT0(cpu_env);
6410 gen_helper_fpop(cpu_env);
6411 gen_helper_fpop(cpu_env);
2c0262af
FB
6412 break;
6413 default:
6414 goto illegal_op;
6415 }
6416 break;
6417 case 0x1c:
6418 switch(rm) {
6419 case 0: /* feni (287 only, just do nop here) */
6420 break;
6421 case 1: /* fdisi (287 only, just do nop here) */
6422 break;
6423 case 2: /* fclex */
d3eb5eae 6424 gen_helper_fclex(cpu_env);
2c0262af
FB
6425 break;
6426 case 3: /* fninit */
d3eb5eae 6427 gen_helper_fninit(cpu_env);
2c0262af
FB
6428 break;
6429 case 4: /* fsetpm (287 only, just do nop here) */
6430 break;
6431 default:
6432 goto illegal_op;
6433 }
6434 break;
6435 case 0x1d: /* fucomi */
773cdfcc 6436 gen_update_cc_op(s);
d3eb5eae
BS
6437 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6438 gen_helper_fucomi_ST0_FT0(cpu_env);
3ca51d07 6439 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6440 break;
6441 case 0x1e: /* fcomi */
773cdfcc 6442 gen_update_cc_op(s);
d3eb5eae
BS
6443 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6444 gen_helper_fcomi_ST0_FT0(cpu_env);
3ca51d07 6445 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 6446 break;
658c8bda 6447 case 0x28: /* ffree sti */
d3eb5eae 6448 gen_helper_ffree_STN(cpu_env, tcg_const_i32(opreg));
5fafdf24 6449 break;
2c0262af 6450 case 0x2a: /* fst sti */
d3eb5eae 6451 gen_helper_fmov_STN_ST0(cpu_env, tcg_const_i32(opreg));
2c0262af
FB
6452 break;
6453 case 0x2b: /* fstp sti */
c169c906
FB
6454 case 0x0b: /* fstp1 sti, undocumented op */
6455 case 0x3a: /* fstp8 sti, undocumented op */
6456 case 0x3b: /* fstp9 sti, undocumented op */
d3eb5eae
BS
6457 gen_helper_fmov_STN_ST0(cpu_env, tcg_const_i32(opreg));
6458 gen_helper_fpop(cpu_env);
2c0262af
FB
6459 break;
6460 case 0x2c: /* fucom st(i) */
d3eb5eae
BS
6461 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6462 gen_helper_fucom_ST0_FT0(cpu_env);
2c0262af
FB
6463 break;
6464 case 0x2d: /* fucomp st(i) */
d3eb5eae
BS
6465 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6466 gen_helper_fucom_ST0_FT0(cpu_env);
6467 gen_helper_fpop(cpu_env);
2c0262af
FB
6468 break;
6469 case 0x33: /* de/3 */
6470 switch(rm) {
6471 case 1: /* fcompp */
d3eb5eae
BS
6472 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(1));
6473 gen_helper_fcom_ST0_FT0(cpu_env);
6474 gen_helper_fpop(cpu_env);
6475 gen_helper_fpop(cpu_env);
2c0262af
FB
6476 break;
6477 default:
6478 goto illegal_op;
6479 }
6480 break;
c169c906 6481 case 0x38: /* ffreep sti, undocumented op */
d3eb5eae
BS
6482 gen_helper_ffree_STN(cpu_env, tcg_const_i32(opreg));
6483 gen_helper_fpop(cpu_env);
c169c906 6484 break;
2c0262af
FB
6485 case 0x3c: /* df/4 */
6486 switch(rm) {
6487 case 0:
d3eb5eae 6488 gen_helper_fnstsw(cpu_tmp2_i32, cpu_env);
b6abf97d 6489 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6490 gen_op_mov_reg_T0(OT_WORD, R_EAX);
2c0262af
FB
6491 break;
6492 default:
6493 goto illegal_op;
6494 }
6495 break;
6496 case 0x3d: /* fucomip */
773cdfcc 6497 gen_update_cc_op(s);
d3eb5eae
BS
6498 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6499 gen_helper_fucomi_ST0_FT0(cpu_env);
6500 gen_helper_fpop(cpu_env);
3ca51d07 6501 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6502 break;
6503 case 0x3e: /* fcomip */
773cdfcc 6504 gen_update_cc_op(s);
d3eb5eae
BS
6505 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6506 gen_helper_fcomi_ST0_FT0(cpu_env);
6507 gen_helper_fpop(cpu_env);
3ca51d07 6508 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 6509 break;
a2cc3b24
FB
6510 case 0x10 ... 0x13: /* fcmovxx */
6511 case 0x18 ... 0x1b:
6512 {
19e6c4b8 6513 int op1, l1;
d70040bc 6514 static const uint8_t fcmov_cc[8] = {
a2cc3b24
FB
6515 (JCC_B << 1),
6516 (JCC_Z << 1),
6517 (JCC_BE << 1),
6518 (JCC_P << 1),
6519 };
1e4840bf 6520 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
19e6c4b8 6521 l1 = gen_new_label();
dc259201 6522 gen_jcc1_noeob(s, op1, l1);
d3eb5eae 6523 gen_helper_fmov_ST0_STN(cpu_env, tcg_const_i32(opreg));
19e6c4b8 6524 gen_set_label(l1);
a2cc3b24
FB
6525 }
6526 break;
2c0262af
FB
6527 default:
6528 goto illegal_op;
6529 }
6530 }
6531 break;
6532 /************************/
6533 /* string ops */
6534
6535 case 0xa4: /* movsS */
6536 case 0xa5:
6537 if ((b & 1) == 0)
6538 ot = OT_BYTE;
6539 else
14ce26e7 6540 ot = dflag + OT_WORD;
2c0262af
FB
6541
6542 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6543 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6544 } else {
6545 gen_movs(s, ot);
6546 }
6547 break;
3b46e624 6548
2c0262af
FB
6549 case 0xaa: /* stosS */
6550 case 0xab:
6551 if ((b & 1) == 0)
6552 ot = OT_BYTE;
6553 else
14ce26e7 6554 ot = dflag + OT_WORD;
2c0262af
FB
6555
6556 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6557 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6558 } else {
6559 gen_stos(s, ot);
6560 }
6561 break;
6562 case 0xac: /* lodsS */
6563 case 0xad:
6564 if ((b & 1) == 0)
6565 ot = OT_BYTE;
6566 else
14ce26e7 6567 ot = dflag + OT_WORD;
2c0262af
FB
6568 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6569 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6570 } else {
6571 gen_lods(s, ot);
6572 }
6573 break;
6574 case 0xae: /* scasS */
6575 case 0xaf:
6576 if ((b & 1) == 0)
6577 ot = OT_BYTE;
6578 else
14ce26e7 6579 ot = dflag + OT_WORD;
2c0262af
FB
6580 if (prefixes & PREFIX_REPNZ) {
6581 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6582 } else if (prefixes & PREFIX_REPZ) {
6583 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6584 } else {
6585 gen_scas(s, ot);
2c0262af
FB
6586 }
6587 break;
6588
6589 case 0xa6: /* cmpsS */
6590 case 0xa7:
6591 if ((b & 1) == 0)
6592 ot = OT_BYTE;
6593 else
14ce26e7 6594 ot = dflag + OT_WORD;
2c0262af
FB
6595 if (prefixes & PREFIX_REPNZ) {
6596 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6597 } else if (prefixes & PREFIX_REPZ) {
6598 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6599 } else {
6600 gen_cmps(s, ot);
2c0262af
FB
6601 }
6602 break;
6603 case 0x6c: /* insS */
6604 case 0x6d:
f115e911
FB
6605 if ((b & 1) == 0)
6606 ot = OT_BYTE;
6607 else
6608 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6609 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
0573fbfc 6610 gen_op_andl_T0_ffff();
b8b6a50b
FB
6611 gen_check_io(s, ot, pc_start - s->cs_base,
6612 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
f115e911
FB
6613 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6614 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
2c0262af 6615 } else {
f115e911 6616 gen_ins(s, ot);
2e70f6ef
PB
6617 if (use_icount) {
6618 gen_jmp(s, s->pc - s->cs_base);
6619 }
2c0262af
FB
6620 }
6621 break;
6622 case 0x6e: /* outsS */
6623 case 0x6f:
f115e911
FB
6624 if ((b & 1) == 0)
6625 ot = OT_BYTE;
6626 else
6627 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6628 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
0573fbfc 6629 gen_op_andl_T0_ffff();
b8b6a50b
FB
6630 gen_check_io(s, ot, pc_start - s->cs_base,
6631 svm_is_rep(prefixes) | 4);
f115e911
FB
6632 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6633 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
2c0262af 6634 } else {
f115e911 6635 gen_outs(s, ot);
2e70f6ef
PB
6636 if (use_icount) {
6637 gen_jmp(s, s->pc - s->cs_base);
6638 }
2c0262af
FB
6639 }
6640 break;
6641
6642 /************************/
6643 /* port I/O */
0573fbfc 6644
2c0262af
FB
6645 case 0xe4:
6646 case 0xe5:
f115e911
FB
6647 if ((b & 1) == 0)
6648 ot = OT_BYTE;
6649 else
6650 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 6651 val = cpu_ldub_code(env, s->pc++);
f115e911 6652 gen_op_movl_T0_im(val);
b8b6a50b
FB
6653 gen_check_io(s, ot, pc_start - s->cs_base,
6654 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
2e70f6ef
PB
6655 if (use_icount)
6656 gen_io_start();
b6abf97d 6657 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 6658 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
57fec1fe 6659 gen_op_mov_reg_T1(ot, R_EAX);
2e70f6ef
PB
6660 if (use_icount) {
6661 gen_io_end();
6662 gen_jmp(s, s->pc - s->cs_base);
6663 }
2c0262af
FB
6664 break;
6665 case 0xe6:
6666 case 0xe7:
f115e911
FB
6667 if ((b & 1) == 0)
6668 ot = OT_BYTE;
6669 else
6670 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 6671 val = cpu_ldub_code(env, s->pc++);
f115e911 6672 gen_op_movl_T0_im(val);
b8b6a50b
FB
6673 gen_check_io(s, ot, pc_start - s->cs_base,
6674 svm_is_rep(prefixes));
57fec1fe 6675 gen_op_mov_TN_reg(ot, 1, R_EAX);
b8b6a50b 6676
2e70f6ef
PB
6677 if (use_icount)
6678 gen_io_start();
b6abf97d 6679 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
b6abf97d 6680 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
a7812ae4 6681 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
2e70f6ef
PB
6682 if (use_icount) {
6683 gen_io_end();
6684 gen_jmp(s, s->pc - s->cs_base);
6685 }
2c0262af
FB
6686 break;
6687 case 0xec:
6688 case 0xed:
f115e911
FB
6689 if ((b & 1) == 0)
6690 ot = OT_BYTE;
6691 else
6692 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6693 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4f31916f 6694 gen_op_andl_T0_ffff();
b8b6a50b
FB
6695 gen_check_io(s, ot, pc_start - s->cs_base,
6696 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
2e70f6ef
PB
6697 if (use_icount)
6698 gen_io_start();
b6abf97d 6699 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 6700 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
57fec1fe 6701 gen_op_mov_reg_T1(ot, R_EAX);
2e70f6ef
PB
6702 if (use_icount) {
6703 gen_io_end();
6704 gen_jmp(s, s->pc - s->cs_base);
6705 }
2c0262af
FB
6706 break;
6707 case 0xee:
6708 case 0xef:
f115e911
FB
6709 if ((b & 1) == 0)
6710 ot = OT_BYTE;
6711 else
6712 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6713 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4f31916f 6714 gen_op_andl_T0_ffff();
b8b6a50b
FB
6715 gen_check_io(s, ot, pc_start - s->cs_base,
6716 svm_is_rep(prefixes));
57fec1fe 6717 gen_op_mov_TN_reg(ot, 1, R_EAX);
b8b6a50b 6718
2e70f6ef
PB
6719 if (use_icount)
6720 gen_io_start();
b6abf97d 6721 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
b6abf97d 6722 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
a7812ae4 6723 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
2e70f6ef
PB
6724 if (use_icount) {
6725 gen_io_end();
6726 gen_jmp(s, s->pc - s->cs_base);
6727 }
2c0262af
FB
6728 break;
6729
6730 /************************/
6731 /* control */
6732 case 0xc2: /* ret im */
0af10c86 6733 val = cpu_ldsw_code(env, s->pc);
2c0262af
FB
6734 s->pc += 2;
6735 gen_pop_T0(s);
8f091a59
FB
6736 if (CODE64(s) && s->dflag)
6737 s->dflag = 2;
2c0262af
FB
6738 gen_stack_update(s, val + (2 << s->dflag));
6739 if (s->dflag == 0)
6740 gen_op_andl_T0_ffff();
6741 gen_op_jmp_T0();
6742 gen_eob(s);
6743 break;
6744 case 0xc3: /* ret */
6745 gen_pop_T0(s);
6746 gen_pop_update(s);
6747 if (s->dflag == 0)
6748 gen_op_andl_T0_ffff();
6749 gen_op_jmp_T0();
6750 gen_eob(s);
6751 break;
6752 case 0xca: /* lret im */
0af10c86 6753 val = cpu_ldsw_code(env, s->pc);
2c0262af
FB
6754 s->pc += 2;
6755 do_lret:
6756 if (s->pe && !s->vm86) {
773cdfcc 6757 gen_update_cc_op(s);
14ce26e7 6758 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 6759 gen_helper_lret_protected(cpu_env, tcg_const_i32(s->dflag),
a7812ae4 6760 tcg_const_i32(val));
2c0262af
FB
6761 } else {
6762 gen_stack_A0(s);
6763 /* pop offset */
57fec1fe 6764 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
2c0262af
FB
6765 if (s->dflag == 0)
6766 gen_op_andl_T0_ffff();
6767 /* NOTE: keeping EIP updated is not a problem in case of
6768 exception */
6769 gen_op_jmp_T0();
6770 /* pop selector */
6771 gen_op_addl_A0_im(2 << s->dflag);
57fec1fe 6772 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
3bd7da9e 6773 gen_op_movl_seg_T0_vm(R_CS);
2c0262af
FB
6774 /* add stack offset */
6775 gen_stack_update(s, val + (4 << s->dflag));
6776 }
6777 gen_eob(s);
6778 break;
6779 case 0xcb: /* lret */
6780 val = 0;
6781 goto do_lret;
6782 case 0xcf: /* iret */
872929aa 6783 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
2c0262af
FB
6784 if (!s->pe) {
6785 /* real mode */
2999a0b2 6786 gen_helper_iret_real(cpu_env, tcg_const_i32(s->dflag));
3ca51d07 6787 set_cc_op(s, CC_OP_EFLAGS);
f115e911
FB
6788 } else if (s->vm86) {
6789 if (s->iopl != 3) {
6790 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6791 } else {
2999a0b2 6792 gen_helper_iret_real(cpu_env, tcg_const_i32(s->dflag));
3ca51d07 6793 set_cc_op(s, CC_OP_EFLAGS);
f115e911 6794 }
2c0262af 6795 } else {
773cdfcc 6796 gen_update_cc_op(s);
14ce26e7 6797 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 6798 gen_helper_iret_protected(cpu_env, tcg_const_i32(s->dflag),
a7812ae4 6799 tcg_const_i32(s->pc - s->cs_base));
3ca51d07 6800 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6801 }
6802 gen_eob(s);
6803 break;
6804 case 0xe8: /* call im */
6805 {
14ce26e7 6806 if (dflag)
0af10c86 6807 tval = (int32_t)insn_get(env, s, OT_LONG);
14ce26e7 6808 else
0af10c86 6809 tval = (int16_t)insn_get(env, s, OT_WORD);
2c0262af 6810 next_eip = s->pc - s->cs_base;
14ce26e7 6811 tval += next_eip;
2c0262af 6812 if (s->dflag == 0)
14ce26e7 6813 tval &= 0xffff;
99596385
AJ
6814 else if(!CODE64(s))
6815 tval &= 0xffffffff;
14ce26e7 6816 gen_movtl_T0_im(next_eip);
2c0262af 6817 gen_push_T0(s);
14ce26e7 6818 gen_jmp(s, tval);
2c0262af
FB
6819 }
6820 break;
6821 case 0x9a: /* lcall im */
6822 {
6823 unsigned int selector, offset;
3b46e624 6824
14ce26e7
FB
6825 if (CODE64(s))
6826 goto illegal_op;
2c0262af 6827 ot = dflag ? OT_LONG : OT_WORD;
0af10c86
BS
6828 offset = insn_get(env, s, ot);
6829 selector = insn_get(env, s, OT_WORD);
3b46e624 6830
2c0262af 6831 gen_op_movl_T0_im(selector);
14ce26e7 6832 gen_op_movl_T1_imu(offset);
2c0262af
FB
6833 }
6834 goto do_lcall;
ecada8a2 6835 case 0xe9: /* jmp im */
14ce26e7 6836 if (dflag)
0af10c86 6837 tval = (int32_t)insn_get(env, s, OT_LONG);
14ce26e7 6838 else
0af10c86 6839 tval = (int16_t)insn_get(env, s, OT_WORD);
14ce26e7 6840 tval += s->pc - s->cs_base;
2c0262af 6841 if (s->dflag == 0)
14ce26e7 6842 tval &= 0xffff;
32938e12
AJ
6843 else if(!CODE64(s))
6844 tval &= 0xffffffff;
14ce26e7 6845 gen_jmp(s, tval);
2c0262af
FB
6846 break;
6847 case 0xea: /* ljmp im */
6848 {
6849 unsigned int selector, offset;
6850
14ce26e7
FB
6851 if (CODE64(s))
6852 goto illegal_op;
2c0262af 6853 ot = dflag ? OT_LONG : OT_WORD;
0af10c86
BS
6854 offset = insn_get(env, s, ot);
6855 selector = insn_get(env, s, OT_WORD);
3b46e624 6856
2c0262af 6857 gen_op_movl_T0_im(selector);
14ce26e7 6858 gen_op_movl_T1_imu(offset);
2c0262af
FB
6859 }
6860 goto do_ljmp;
6861 case 0xeb: /* jmp Jb */
0af10c86 6862 tval = (int8_t)insn_get(env, s, OT_BYTE);
14ce26e7 6863 tval += s->pc - s->cs_base;
2c0262af 6864 if (s->dflag == 0)
14ce26e7
FB
6865 tval &= 0xffff;
6866 gen_jmp(s, tval);
2c0262af
FB
6867 break;
6868 case 0x70 ... 0x7f: /* jcc Jb */
0af10c86 6869 tval = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
6870 goto do_jcc;
6871 case 0x180 ... 0x18f: /* jcc Jv */
6872 if (dflag) {
0af10c86 6873 tval = (int32_t)insn_get(env, s, OT_LONG);
2c0262af 6874 } else {
0af10c86 6875 tval = (int16_t)insn_get(env, s, OT_WORD);
2c0262af
FB
6876 }
6877 do_jcc:
6878 next_eip = s->pc - s->cs_base;
14ce26e7 6879 tval += next_eip;
2c0262af 6880 if (s->dflag == 0)
14ce26e7
FB
6881 tval &= 0xffff;
6882 gen_jcc(s, b, tval, next_eip);
2c0262af
FB
6883 break;
6884
6885 case 0x190 ... 0x19f: /* setcc Gv */
0af10c86 6886 modrm = cpu_ldub_code(env, s->pc++);
cc8b6f5b 6887 gen_setcc1(s, b, cpu_T[0]);
0af10c86 6888 gen_ldst_modrm(env, s, modrm, OT_BYTE, OR_TMP0, 1);
2c0262af
FB
6889 break;
6890 case 0x140 ... 0x14f: /* cmov Gv, Ev */
f32d3781
PB
6891 ot = dflag + OT_WORD;
6892 modrm = cpu_ldub_code(env, s->pc++);
6893 reg = ((modrm >> 3) & 7) | rex_r;
6894 gen_cmovcc1(env, s, ot, b, modrm, reg);
2c0262af 6895 break;
3b46e624 6896
2c0262af
FB
6897 /************************/
6898 /* flags */
6899 case 0x9c: /* pushf */
872929aa 6900 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
2c0262af
FB
6901 if (s->vm86 && s->iopl != 3) {
6902 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6903 } else {
773cdfcc 6904 gen_update_cc_op(s);
f0967a1a 6905 gen_helper_read_eflags(cpu_T[0], cpu_env);
2c0262af
FB
6906 gen_push_T0(s);
6907 }
6908 break;
6909 case 0x9d: /* popf */
872929aa 6910 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
2c0262af
FB
6911 if (s->vm86 && s->iopl != 3) {
6912 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6913 } else {
6914 gen_pop_T0(s);
6915 if (s->cpl == 0) {
6916 if (s->dflag) {
f0967a1a
BS
6917 gen_helper_write_eflags(cpu_env, cpu_T[0],
6918 tcg_const_i32((TF_MASK | AC_MASK |
6919 ID_MASK | NT_MASK |
6920 IF_MASK |
6921 IOPL_MASK)));
2c0262af 6922 } else {
f0967a1a
BS
6923 gen_helper_write_eflags(cpu_env, cpu_T[0],
6924 tcg_const_i32((TF_MASK | AC_MASK |
6925 ID_MASK | NT_MASK |
6926 IF_MASK | IOPL_MASK)
6927 & 0xffff));
2c0262af
FB
6928 }
6929 } else {
4136f33c
FB
6930 if (s->cpl <= s->iopl) {
6931 if (s->dflag) {
f0967a1a
BS
6932 gen_helper_write_eflags(cpu_env, cpu_T[0],
6933 tcg_const_i32((TF_MASK |
6934 AC_MASK |
6935 ID_MASK |
6936 NT_MASK |
6937 IF_MASK)));
4136f33c 6938 } else {
f0967a1a
BS
6939 gen_helper_write_eflags(cpu_env, cpu_T[0],
6940 tcg_const_i32((TF_MASK |
6941 AC_MASK |
6942 ID_MASK |
6943 NT_MASK |
6944 IF_MASK)
6945 & 0xffff));
4136f33c 6946 }
2c0262af 6947 } else {
4136f33c 6948 if (s->dflag) {
f0967a1a
BS
6949 gen_helper_write_eflags(cpu_env, cpu_T[0],
6950 tcg_const_i32((TF_MASK | AC_MASK |
6951 ID_MASK | NT_MASK)));
4136f33c 6952 } else {
f0967a1a
BS
6953 gen_helper_write_eflags(cpu_env, cpu_T[0],
6954 tcg_const_i32((TF_MASK | AC_MASK |
6955 ID_MASK | NT_MASK)
6956 & 0xffff));
4136f33c 6957 }
2c0262af
FB
6958 }
6959 }
6960 gen_pop_update(s);
3ca51d07 6961 set_cc_op(s, CC_OP_EFLAGS);
a9321a4d 6962 /* abort translation because TF/AC flag may change */
14ce26e7 6963 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
6964 gen_eob(s);
6965 }
6966 break;
6967 case 0x9e: /* sahf */
12e26b75 6968 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
14ce26e7 6969 goto illegal_op;
57fec1fe 6970 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
d229edce 6971 gen_compute_eflags(s);
bd7a7b33
FB
6972 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6973 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6974 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
2c0262af
FB
6975 break;
6976 case 0x9f: /* lahf */
12e26b75 6977 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
14ce26e7 6978 goto illegal_op;
d229edce 6979 gen_compute_eflags(s);
bd7a7b33 6980 /* Note: gen_compute_eflags() only gives the condition codes */
d229edce 6981 tcg_gen_ori_tl(cpu_T[0], cpu_cc_src, 0x02);
57fec1fe 6982 gen_op_mov_reg_T0(OT_BYTE, R_AH);
2c0262af
FB
6983 break;
6984 case 0xf5: /* cmc */
d229edce 6985 gen_compute_eflags(s);
bd7a7b33 6986 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
2c0262af
FB
6987 break;
6988 case 0xf8: /* clc */
d229edce 6989 gen_compute_eflags(s);
bd7a7b33 6990 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
2c0262af
FB
6991 break;
6992 case 0xf9: /* stc */
d229edce 6993 gen_compute_eflags(s);
bd7a7b33 6994 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
2c0262af
FB
6995 break;
6996 case 0xfc: /* cld */
b6abf97d 6997 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
317ac620 6998 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
2c0262af
FB
6999 break;
7000 case 0xfd: /* std */
b6abf97d 7001 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
317ac620 7002 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
2c0262af
FB
7003 break;
7004
7005 /************************/
7006 /* bit operations */
7007 case 0x1ba: /* bt/bts/btr/btc Gv, im */
14ce26e7 7008 ot = dflag + OT_WORD;
0af10c86 7009 modrm = cpu_ldub_code(env, s->pc++);
33698e5f 7010 op = (modrm >> 3) & 7;
2c0262af 7011 mod = (modrm >> 6) & 3;
14ce26e7 7012 rm = (modrm & 7) | REX_B(s);
2c0262af 7013 if (mod != 3) {
14ce26e7 7014 s->rip_offset = 1;
0af10c86 7015 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 7016 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 7017 } else {
57fec1fe 7018 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
7019 }
7020 /* load shift */
0af10c86 7021 val = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7022 gen_op_movl_T1_im(val);
7023 if (op < 4)
7024 goto illegal_op;
7025 op -= 4;
f484d386 7026 goto bt_op;
2c0262af
FB
7027 case 0x1a3: /* bt Gv, Ev */
7028 op = 0;
7029 goto do_btx;
7030 case 0x1ab: /* bts */
7031 op = 1;
7032 goto do_btx;
7033 case 0x1b3: /* btr */
7034 op = 2;
7035 goto do_btx;
7036 case 0x1bb: /* btc */
7037 op = 3;
7038 do_btx:
14ce26e7 7039 ot = dflag + OT_WORD;
0af10c86 7040 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 7041 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 7042 mod = (modrm >> 6) & 3;
14ce26e7 7043 rm = (modrm & 7) | REX_B(s);
57fec1fe 7044 gen_op_mov_TN_reg(OT_LONG, 1, reg);
2c0262af 7045 if (mod != 3) {
0af10c86 7046 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 7047 /* specific case: we need to add a displacement */
f484d386
FB
7048 gen_exts(ot, cpu_T[1]);
7049 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7050 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7051 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
57fec1fe 7052 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 7053 } else {
57fec1fe 7054 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af 7055 }
f484d386
FB
7056 bt_op:
7057 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7058 switch(op) {
7059 case 0:
7060 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7061 tcg_gen_movi_tl(cpu_cc_dst, 0);
7062 break;
7063 case 1:
7064 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7065 tcg_gen_movi_tl(cpu_tmp0, 1);
7066 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7067 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7068 break;
7069 case 2:
7070 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7071 tcg_gen_movi_tl(cpu_tmp0, 1);
7072 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7073 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7074 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7075 break;
7076 default:
7077 case 3:
7078 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7079 tcg_gen_movi_tl(cpu_tmp0, 1);
7080 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7081 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7082 break;
7083 }
3ca51d07 7084 set_cc_op(s, CC_OP_SARB + ot);
2c0262af
FB
7085 if (op != 0) {
7086 if (mod != 3)
57fec1fe 7087 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 7088 else
57fec1fe 7089 gen_op_mov_reg_T0(ot, rm);
f484d386
FB
7090 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7091 tcg_gen_movi_tl(cpu_cc_dst, 0);
2c0262af
FB
7092 }
7093 break;
321c5351
RH
7094 case 0x1bc: /* bsf / tzcnt */
7095 case 0x1bd: /* bsr / lzcnt */
7096 ot = dflag + OT_WORD;
7097 modrm = cpu_ldub_code(env, s->pc++);
7098 reg = ((modrm >> 3) & 7) | rex_r;
7099 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
7100 gen_extu(ot, cpu_T[0]);
7101
7102 /* Note that lzcnt and tzcnt are in different extensions. */
7103 if ((prefixes & PREFIX_REPZ)
7104 && (b & 1
7105 ? s->cpuid_ext3_features & CPUID_EXT3_ABM
7106 : s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)) {
7107 int size = 8 << ot;
7108 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
7109 if (b & 1) {
7110 /* For lzcnt, reduce the target_ulong result by the
7111 number of zeros that we expect to find at the top. */
7112 gen_helper_clz(cpu_T[0], cpu_T[0]);
7113 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], TARGET_LONG_BITS - size);
6191b059 7114 } else {
321c5351
RH
7115 /* For tzcnt, a zero input must return the operand size:
7116 force all bits outside the operand size to 1. */
7117 target_ulong mask = (target_ulong)-2 << (size - 1);
7118 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], mask);
7119 gen_helper_ctz(cpu_T[0], cpu_T[0]);
6191b059 7120 }
321c5351
RH
7121 /* For lzcnt/tzcnt, C and Z bits are defined and are
7122 related to the result. */
7123 gen_op_update1_cc();
7124 set_cc_op(s, CC_OP_BMILGB + ot);
7125 } else {
7126 /* For bsr/bsf, only the Z bit is defined and it is related
7127 to the input and not the result. */
7128 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
7129 set_cc_op(s, CC_OP_LOGICB + ot);
7130 if (b & 1) {
7131 /* For bsr, return the bit index of the first 1 bit,
7132 not the count of leading zeros. */
7133 gen_helper_clz(cpu_T[0], cpu_T[0]);
7134 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], TARGET_LONG_BITS - 1);
7135 } else {
7136 gen_helper_ctz(cpu_T[0], cpu_T[0]);
7137 }
7138 /* ??? The manual says that the output is undefined when the
7139 input is zero, but real hardware leaves it unchanged, and
7140 real programs appear to depend on that. */
7141 tcg_gen_movi_tl(cpu_tmp0, 0);
7142 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_T[0], cpu_cc_dst, cpu_tmp0,
7143 cpu_regs[reg], cpu_T[0]);
6191b059 7144 }
321c5351 7145 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
7146 break;
7147 /************************/
7148 /* bcd */
7149 case 0x27: /* daa */
14ce26e7
FB
7150 if (CODE64(s))
7151 goto illegal_op;
773cdfcc 7152 gen_update_cc_op(s);
7923057b 7153 gen_helper_daa(cpu_env);
3ca51d07 7154 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7155 break;
7156 case 0x2f: /* das */
14ce26e7
FB
7157 if (CODE64(s))
7158 goto illegal_op;
773cdfcc 7159 gen_update_cc_op(s);
7923057b 7160 gen_helper_das(cpu_env);
3ca51d07 7161 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7162 break;
7163 case 0x37: /* aaa */
14ce26e7
FB
7164 if (CODE64(s))
7165 goto illegal_op;
773cdfcc 7166 gen_update_cc_op(s);
7923057b 7167 gen_helper_aaa(cpu_env);
3ca51d07 7168 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7169 break;
7170 case 0x3f: /* aas */
14ce26e7
FB
7171 if (CODE64(s))
7172 goto illegal_op;
773cdfcc 7173 gen_update_cc_op(s);
7923057b 7174 gen_helper_aas(cpu_env);
3ca51d07 7175 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7176 break;
7177 case 0xd4: /* aam */
14ce26e7
FB
7178 if (CODE64(s))
7179 goto illegal_op;
0af10c86 7180 val = cpu_ldub_code(env, s->pc++);
b6d7c3db
TS
7181 if (val == 0) {
7182 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7183 } else {
7923057b 7184 gen_helper_aam(cpu_env, tcg_const_i32(val));
3ca51d07 7185 set_cc_op(s, CC_OP_LOGICB);
b6d7c3db 7186 }
2c0262af
FB
7187 break;
7188 case 0xd5: /* aad */
14ce26e7
FB
7189 if (CODE64(s))
7190 goto illegal_op;
0af10c86 7191 val = cpu_ldub_code(env, s->pc++);
7923057b 7192 gen_helper_aad(cpu_env, tcg_const_i32(val));
3ca51d07 7193 set_cc_op(s, CC_OP_LOGICB);
2c0262af
FB
7194 break;
7195 /************************/
7196 /* misc */
7197 case 0x90: /* nop */
ab1f142b 7198 /* XXX: correct lock test for all insn */
7418027e 7199 if (prefixes & PREFIX_LOCK) {
ab1f142b 7200 goto illegal_op;
7418027e
RH
7201 }
7202 /* If REX_B is set, then this is xchg eax, r8d, not a nop. */
7203 if (REX_B(s)) {
7204 goto do_xchg_reg_eax;
7205 }
0573fbfc
TS
7206 if (prefixes & PREFIX_REPZ) {
7207 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7208 }
2c0262af
FB
7209 break;
7210 case 0x9b: /* fwait */
5fafdf24 7211 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7eee2a50
FB
7212 (HF_MP_MASK | HF_TS_MASK)) {
7213 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2ee73ac3 7214 } else {
773cdfcc 7215 gen_update_cc_op(s);
14ce26e7 7216 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 7217 gen_helper_fwait(cpu_env);
7eee2a50 7218 }
2c0262af
FB
7219 break;
7220 case 0xcc: /* int3 */
7221 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7222 break;
7223 case 0xcd: /* int N */
0af10c86 7224 val = cpu_ldub_code(env, s->pc++);
f115e911 7225 if (s->vm86 && s->iopl != 3) {
5fafdf24 7226 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
f115e911
FB
7227 } else {
7228 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7229 }
2c0262af
FB
7230 break;
7231 case 0xce: /* into */
14ce26e7
FB
7232 if (CODE64(s))
7233 goto illegal_op;
773cdfcc 7234 gen_update_cc_op(s);
a8ede8ba 7235 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7236 gen_helper_into(cpu_env, tcg_const_i32(s->pc - pc_start));
2c0262af 7237 break;
0b97134b 7238#ifdef WANT_ICEBP
2c0262af 7239 case 0xf1: /* icebp (undocumented, exits to external debugger) */
872929aa 7240 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
aba9d61e 7241#if 1
2c0262af 7242 gen_debug(s, pc_start - s->cs_base);
aba9d61e
FB
7243#else
7244 /* start debug */
0af10c86 7245 tb_flush(env);
24537a01 7246 qemu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
aba9d61e 7247#endif
2c0262af 7248 break;
0b97134b 7249#endif
2c0262af
FB
7250 case 0xfa: /* cli */
7251 if (!s->vm86) {
7252 if (s->cpl <= s->iopl) {
f0967a1a 7253 gen_helper_cli(cpu_env);
2c0262af
FB
7254 } else {
7255 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7256 }
7257 } else {
7258 if (s->iopl == 3) {
f0967a1a 7259 gen_helper_cli(cpu_env);
2c0262af
FB
7260 } else {
7261 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7262 }
7263 }
7264 break;
7265 case 0xfb: /* sti */
7266 if (!s->vm86) {
7267 if (s->cpl <= s->iopl) {
7268 gen_sti:
f0967a1a 7269 gen_helper_sti(cpu_env);
2c0262af 7270 /* interruptions are enabled only the first insn after sti */
a2cc3b24
FB
7271 /* If several instructions disable interrupts, only the
7272 _first_ does it */
7273 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 7274 gen_helper_set_inhibit_irq(cpu_env);
2c0262af 7275 /* give a chance to handle pending irqs */
14ce26e7 7276 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
7277 gen_eob(s);
7278 } else {
7279 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7280 }
7281 } else {
7282 if (s->iopl == 3) {
7283 goto gen_sti;
7284 } else {
7285 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7286 }
7287 }
7288 break;
7289 case 0x62: /* bound */
14ce26e7
FB
7290 if (CODE64(s))
7291 goto illegal_op;
2c0262af 7292 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 7293 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7294 reg = (modrm >> 3) & 7;
7295 mod = (modrm >> 6) & 3;
7296 if (mod == 3)
7297 goto illegal_op;
57fec1fe 7298 gen_op_mov_TN_reg(ot, 0, reg);
0af10c86 7299 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 7300 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7301 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
92fc4b58
BS
7302 if (ot == OT_WORD) {
7303 gen_helper_boundw(cpu_env, cpu_A0, cpu_tmp2_i32);
7304 } else {
7305 gen_helper_boundl(cpu_env, cpu_A0, cpu_tmp2_i32);
7306 }
2c0262af
FB
7307 break;
7308 case 0x1c8 ... 0x1cf: /* bswap reg */
14ce26e7
FB
7309 reg = (b & 7) | REX_B(s);
7310#ifdef TARGET_X86_64
7311 if (dflag == 2) {
57fec1fe 7312 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
66896cb8 7313 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
57fec1fe 7314 gen_op_mov_reg_T0(OT_QUAD, reg);
5fafdf24 7315 } else
8777643e 7316#endif
57fec1fe
FB
7317 {
7318 gen_op_mov_TN_reg(OT_LONG, 0, reg);
8777643e
AJ
7319 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
7320 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
57fec1fe 7321 gen_op_mov_reg_T0(OT_LONG, reg);
14ce26e7 7322 }
2c0262af
FB
7323 break;
7324 case 0xd6: /* salc */
14ce26e7
FB
7325 if (CODE64(s))
7326 goto illegal_op;
cc8b6f5b 7327 gen_compute_eflags_c(s, cpu_T[0]);
bd7a7b33
FB
7328 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7329 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
2c0262af
FB
7330 break;
7331 case 0xe0: /* loopnz */
7332 case 0xe1: /* loopz */
2c0262af
FB
7333 case 0xe2: /* loop */
7334 case 0xe3: /* jecxz */
14ce26e7 7335 {
6e0d8677 7336 int l1, l2, l3;
14ce26e7 7337
0af10c86 7338 tval = (int8_t)insn_get(env, s, OT_BYTE);
14ce26e7
FB
7339 next_eip = s->pc - s->cs_base;
7340 tval += next_eip;
7341 if (s->dflag == 0)
7342 tval &= 0xffff;
3b46e624 7343
14ce26e7
FB
7344 l1 = gen_new_label();
7345 l2 = gen_new_label();
6e0d8677 7346 l3 = gen_new_label();
14ce26e7 7347 b &= 3;
6e0d8677
FB
7348 switch(b) {
7349 case 0: /* loopnz */
7350 case 1: /* loopz */
6e0d8677
FB
7351 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7352 gen_op_jz_ecx(s->aflag, l3);
5bdb91b0 7353 gen_jcc1(s, (JCC_Z << 1) | (b ^ 1), l1);
6e0d8677
FB
7354 break;
7355 case 2: /* loop */
7356 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7357 gen_op_jnz_ecx(s->aflag, l1);
7358 break;
7359 default:
7360 case 3: /* jcxz */
7361 gen_op_jz_ecx(s->aflag, l1);
7362 break;
14ce26e7
FB
7363 }
7364
6e0d8677 7365 gen_set_label(l3);
14ce26e7 7366 gen_jmp_im(next_eip);
8e1c85e3 7367 tcg_gen_br(l2);
6e0d8677 7368
14ce26e7
FB
7369 gen_set_label(l1);
7370 gen_jmp_im(tval);
7371 gen_set_label(l2);
7372 gen_eob(s);
7373 }
2c0262af
FB
7374 break;
7375 case 0x130: /* wrmsr */
7376 case 0x132: /* rdmsr */
7377 if (s->cpl != 0) {
7378 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7379 } else {
773cdfcc 7380 gen_update_cc_op(s);
872929aa 7381 gen_jmp_im(pc_start - s->cs_base);
0573fbfc 7382 if (b & 2) {
4a7443be 7383 gen_helper_rdmsr(cpu_env);
0573fbfc 7384 } else {
4a7443be 7385 gen_helper_wrmsr(cpu_env);
0573fbfc 7386 }
2c0262af
FB
7387 }
7388 break;
7389 case 0x131: /* rdtsc */
773cdfcc 7390 gen_update_cc_op(s);
ecada8a2 7391 gen_jmp_im(pc_start - s->cs_base);
efade670
PB
7392 if (use_icount)
7393 gen_io_start();
4a7443be 7394 gen_helper_rdtsc(cpu_env);
efade670
PB
7395 if (use_icount) {
7396 gen_io_end();
7397 gen_jmp(s, s->pc - s->cs_base);
7398 }
2c0262af 7399 break;
df01e0fc 7400 case 0x133: /* rdpmc */
773cdfcc 7401 gen_update_cc_op(s);
df01e0fc 7402 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7403 gen_helper_rdpmc(cpu_env);
df01e0fc 7404 break;
023fe10d 7405 case 0x134: /* sysenter */
2436b61a 7406 /* For Intel SYSENTER is valid on 64-bit */
0af10c86 7407 if (CODE64(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
14ce26e7 7408 goto illegal_op;
023fe10d
FB
7409 if (!s->pe) {
7410 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7411 } else {
728d803b 7412 gen_update_cc_op(s);
14ce26e7 7413 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7414 gen_helper_sysenter(cpu_env);
023fe10d
FB
7415 gen_eob(s);
7416 }
7417 break;
7418 case 0x135: /* sysexit */
2436b61a 7419 /* For Intel SYSEXIT is valid on 64-bit */
0af10c86 7420 if (CODE64(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
14ce26e7 7421 goto illegal_op;
023fe10d
FB
7422 if (!s->pe) {
7423 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7424 } else {
728d803b 7425 gen_update_cc_op(s);
14ce26e7 7426 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7427 gen_helper_sysexit(cpu_env, tcg_const_i32(dflag));
023fe10d
FB
7428 gen_eob(s);
7429 }
7430 break;
14ce26e7
FB
7431#ifdef TARGET_X86_64
7432 case 0x105: /* syscall */
7433 /* XXX: is it usable in real mode ? */
728d803b 7434 gen_update_cc_op(s);
14ce26e7 7435 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7436 gen_helper_syscall(cpu_env, tcg_const_i32(s->pc - pc_start));
14ce26e7
FB
7437 gen_eob(s);
7438 break;
7439 case 0x107: /* sysret */
7440 if (!s->pe) {
7441 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7442 } else {
728d803b 7443 gen_update_cc_op(s);
14ce26e7 7444 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7445 gen_helper_sysret(cpu_env, tcg_const_i32(s->dflag));
aba9d61e 7446 /* condition codes are modified only in long mode */
3ca51d07
RH
7447 if (s->lma) {
7448 set_cc_op(s, CC_OP_EFLAGS);
7449 }
14ce26e7
FB
7450 gen_eob(s);
7451 }
7452 break;
7453#endif
2c0262af 7454 case 0x1a2: /* cpuid */
773cdfcc 7455 gen_update_cc_op(s);
9575cb94 7456 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7457 gen_helper_cpuid(cpu_env);
2c0262af
FB
7458 break;
7459 case 0xf4: /* hlt */
7460 if (s->cpl != 0) {
7461 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7462 } else {
773cdfcc 7463 gen_update_cc_op(s);
94451178 7464 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7465 gen_helper_hlt(cpu_env, tcg_const_i32(s->pc - pc_start));
5779406a 7466 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
7467 }
7468 break;
7469 case 0x100:
0af10c86 7470 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7471 mod = (modrm >> 6) & 3;
7472 op = (modrm >> 3) & 7;
7473 switch(op) {
7474 case 0: /* sldt */
f115e911
FB
7475 if (!s->pe || s->vm86)
7476 goto illegal_op;
872929aa 7477 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
651ba608 7478 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
2c0262af
FB
7479 ot = OT_WORD;
7480 if (mod == 3)
7481 ot += s->dflag;
0af10c86 7482 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
7483 break;
7484 case 2: /* lldt */
f115e911
FB
7485 if (!s->pe || s->vm86)
7486 goto illegal_op;
2c0262af
FB
7487 if (s->cpl != 0) {
7488 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7489 } else {
872929aa 7490 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
0af10c86 7491 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
14ce26e7 7492 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7493 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 7494 gen_helper_lldt(cpu_env, cpu_tmp2_i32);
2c0262af
FB
7495 }
7496 break;
7497 case 1: /* str */
f115e911
FB
7498 if (!s->pe || s->vm86)
7499 goto illegal_op;
872929aa 7500 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
651ba608 7501 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
2c0262af
FB
7502 ot = OT_WORD;
7503 if (mod == 3)
7504 ot += s->dflag;
0af10c86 7505 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
7506 break;
7507 case 3: /* ltr */
f115e911
FB
7508 if (!s->pe || s->vm86)
7509 goto illegal_op;
2c0262af
FB
7510 if (s->cpl != 0) {
7511 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7512 } else {
872929aa 7513 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
0af10c86 7514 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
14ce26e7 7515 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7516 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 7517 gen_helper_ltr(cpu_env, cpu_tmp2_i32);
2c0262af
FB
7518 }
7519 break;
7520 case 4: /* verr */
7521 case 5: /* verw */
f115e911
FB
7522 if (!s->pe || s->vm86)
7523 goto illegal_op;
0af10c86 7524 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
773cdfcc 7525 gen_update_cc_op(s);
2999a0b2
BS
7526 if (op == 4) {
7527 gen_helper_verr(cpu_env, cpu_T[0]);
7528 } else {
7529 gen_helper_verw(cpu_env, cpu_T[0]);
7530 }
3ca51d07 7531 set_cc_op(s, CC_OP_EFLAGS);
f115e911 7532 break;
2c0262af
FB
7533 default:
7534 goto illegal_op;
7535 }
7536 break;
7537 case 0x101:
0af10c86 7538 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7539 mod = (modrm >> 6) & 3;
7540 op = (modrm >> 3) & 7;
3d7374c5 7541 rm = modrm & 7;
2c0262af
FB
7542 switch(op) {
7543 case 0: /* sgdt */
2c0262af
FB
7544 if (mod == 3)
7545 goto illegal_op;
872929aa 7546 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
0af10c86 7547 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 7548 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
57fec1fe 7549 gen_op_st_T0_A0(OT_WORD + s->mem_index);
aba9d61e 7550 gen_add_A0_im(s, 2);
651ba608 7551 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
2c0262af
FB
7552 if (!s->dflag)
7553 gen_op_andl_T0_im(0xffffff);
57fec1fe 7554 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
2c0262af 7555 break;
3d7374c5
FB
7556 case 1:
7557 if (mod == 3) {
7558 switch (rm) {
7559 case 0: /* monitor */
7560 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7561 s->cpl != 0)
7562 goto illegal_op;
773cdfcc 7563 gen_update_cc_op(s);
3d7374c5
FB
7564 gen_jmp_im(pc_start - s->cs_base);
7565#ifdef TARGET_X86_64
7566 if (s->aflag == 2) {
bbf662ee 7567 gen_op_movq_A0_reg(R_EAX);
5fafdf24 7568 } else
3d7374c5
FB
7569#endif
7570 {
bbf662ee 7571 gen_op_movl_A0_reg(R_EAX);
3d7374c5
FB
7572 if (s->aflag == 0)
7573 gen_op_andl_A0_ffff();
7574 }
7575 gen_add_A0_ds_seg(s);
4a7443be 7576 gen_helper_monitor(cpu_env, cpu_A0);
3d7374c5
FB
7577 break;
7578 case 1: /* mwait */
7579 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7580 s->cpl != 0)
7581 goto illegal_op;
728d803b 7582 gen_update_cc_op(s);
94451178 7583 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7584 gen_helper_mwait(cpu_env, tcg_const_i32(s->pc - pc_start));
3d7374c5
FB
7585 gen_eob(s);
7586 break;
a9321a4d
PA
7587 case 2: /* clac */
7588 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) ||
7589 s->cpl != 0) {
7590 goto illegal_op;
7591 }
7592 gen_helper_clac(cpu_env);
7593 gen_jmp_im(s->pc - s->cs_base);
7594 gen_eob(s);
7595 break;
7596 case 3: /* stac */
7597 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) ||
7598 s->cpl != 0) {
7599 goto illegal_op;
7600 }
7601 gen_helper_stac(cpu_env);
7602 gen_jmp_im(s->pc - s->cs_base);
7603 gen_eob(s);
7604 break;
3d7374c5
FB
7605 default:
7606 goto illegal_op;
7607 }
7608 } else { /* sidt */
872929aa 7609 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
0af10c86 7610 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 7611 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
57fec1fe 7612 gen_op_st_T0_A0(OT_WORD + s->mem_index);
3d7374c5 7613 gen_add_A0_im(s, 2);
651ba608 7614 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
3d7374c5
FB
7615 if (!s->dflag)
7616 gen_op_andl_T0_im(0xffffff);
57fec1fe 7617 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
3d7374c5
FB
7618 }
7619 break;
2c0262af
FB
7620 case 2: /* lgdt */
7621 case 3: /* lidt */
0573fbfc 7622 if (mod == 3) {
773cdfcc 7623 gen_update_cc_op(s);
872929aa 7624 gen_jmp_im(pc_start - s->cs_base);
0573fbfc
TS
7625 switch(rm) {
7626 case 0: /* VMRUN */
872929aa
FB
7627 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7628 goto illegal_op;
7629 if (s->cpl != 0) {
7630 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
0573fbfc 7631 break;
872929aa 7632 } else {
052e80d5 7633 gen_helper_vmrun(cpu_env, tcg_const_i32(s->aflag),
a7812ae4 7634 tcg_const_i32(s->pc - pc_start));
db620f46 7635 tcg_gen_exit_tb(0);
5779406a 7636 s->is_jmp = DISAS_TB_JUMP;
872929aa 7637 }
0573fbfc
TS
7638 break;
7639 case 1: /* VMMCALL */
872929aa
FB
7640 if (!(s->flags & HF_SVME_MASK))
7641 goto illegal_op;
052e80d5 7642 gen_helper_vmmcall(cpu_env);
0573fbfc
TS
7643 break;
7644 case 2: /* VMLOAD */
872929aa
FB
7645 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7646 goto illegal_op;
7647 if (s->cpl != 0) {
7648 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7649 break;
7650 } else {
052e80d5 7651 gen_helper_vmload(cpu_env, tcg_const_i32(s->aflag));
872929aa 7652 }
0573fbfc
TS
7653 break;
7654 case 3: /* VMSAVE */
872929aa
FB
7655 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7656 goto illegal_op;
7657 if (s->cpl != 0) {
7658 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7659 break;
7660 } else {
052e80d5 7661 gen_helper_vmsave(cpu_env, tcg_const_i32(s->aflag));
872929aa 7662 }
0573fbfc
TS
7663 break;
7664 case 4: /* STGI */
872929aa
FB
7665 if ((!(s->flags & HF_SVME_MASK) &&
7666 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7667 !s->pe)
7668 goto illegal_op;
7669 if (s->cpl != 0) {
7670 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7671 break;
7672 } else {
052e80d5 7673 gen_helper_stgi(cpu_env);
872929aa 7674 }
0573fbfc
TS
7675 break;
7676 case 5: /* CLGI */
872929aa
FB
7677 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7678 goto illegal_op;
7679 if (s->cpl != 0) {
7680 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7681 break;
7682 } else {
052e80d5 7683 gen_helper_clgi(cpu_env);
872929aa 7684 }
0573fbfc
TS
7685 break;
7686 case 6: /* SKINIT */
872929aa
FB
7687 if ((!(s->flags & HF_SVME_MASK) &&
7688 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7689 !s->pe)
7690 goto illegal_op;
052e80d5 7691 gen_helper_skinit(cpu_env);
0573fbfc
TS
7692 break;
7693 case 7: /* INVLPGA */
872929aa
FB
7694 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7695 goto illegal_op;
7696 if (s->cpl != 0) {
7697 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7698 break;
7699 } else {
052e80d5 7700 gen_helper_invlpga(cpu_env, tcg_const_i32(s->aflag));
872929aa 7701 }
0573fbfc
TS
7702 break;
7703 default:
7704 goto illegal_op;
7705 }
7706 } else if (s->cpl != 0) {
2c0262af
FB
7707 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7708 } else {
872929aa
FB
7709 gen_svm_check_intercept(s, pc_start,
7710 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
0af10c86 7711 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 7712 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
aba9d61e 7713 gen_add_A0_im(s, 2);
57fec1fe 7714 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
2c0262af
FB
7715 if (!s->dflag)
7716 gen_op_andl_T0_im(0xffffff);
7717 if (op == 2) {
651ba608
FB
7718 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7719 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
2c0262af 7720 } else {
651ba608
FB
7721 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7722 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
2c0262af
FB
7723 }
7724 }
7725 break;
7726 case 4: /* smsw */
872929aa 7727 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
e2542fe2 7728#if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
f60d2728 7729 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7730#else
651ba608 7731 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
f60d2728 7732#endif
0af10c86 7733 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 1);
2c0262af
FB
7734 break;
7735 case 6: /* lmsw */
7736 if (s->cpl != 0) {
7737 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7738 } else {
872929aa 7739 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
0af10c86 7740 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
4a7443be 7741 gen_helper_lmsw(cpu_env, cpu_T[0]);
14ce26e7 7742 gen_jmp_im(s->pc - s->cs_base);
d71b9a8b 7743 gen_eob(s);
2c0262af
FB
7744 }
7745 break;
1b050077
AP
7746 case 7:
7747 if (mod != 3) { /* invlpg */
7748 if (s->cpl != 0) {
7749 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7750 } else {
773cdfcc 7751 gen_update_cc_op(s);
1b050077 7752 gen_jmp_im(pc_start - s->cs_base);
0af10c86 7753 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4a7443be 7754 gen_helper_invlpg(cpu_env, cpu_A0);
1b050077
AP
7755 gen_jmp_im(s->pc - s->cs_base);
7756 gen_eob(s);
7757 }
2c0262af 7758 } else {
1b050077
AP
7759 switch (rm) {
7760 case 0: /* swapgs */
14ce26e7 7761#ifdef TARGET_X86_64
1b050077
AP
7762 if (CODE64(s)) {
7763 if (s->cpl != 0) {
7764 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7765 } else {
7766 tcg_gen_ld_tl(cpu_T[0], cpu_env,
7767 offsetof(CPUX86State,segs[R_GS].base));
7768 tcg_gen_ld_tl(cpu_T[1], cpu_env,
7769 offsetof(CPUX86State,kernelgsbase));
7770 tcg_gen_st_tl(cpu_T[1], cpu_env,
7771 offsetof(CPUX86State,segs[R_GS].base));
7772 tcg_gen_st_tl(cpu_T[0], cpu_env,
7773 offsetof(CPUX86State,kernelgsbase));
7774 }
5fafdf24 7775 } else
14ce26e7
FB
7776#endif
7777 {
7778 goto illegal_op;
7779 }
1b050077
AP
7780 break;
7781 case 1: /* rdtscp */
7782 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7783 goto illegal_op;
773cdfcc 7784 gen_update_cc_op(s);
9575cb94 7785 gen_jmp_im(pc_start - s->cs_base);
1b050077
AP
7786 if (use_icount)
7787 gen_io_start();
4a7443be 7788 gen_helper_rdtscp(cpu_env);
1b050077
AP
7789 if (use_icount) {
7790 gen_io_end();
7791 gen_jmp(s, s->pc - s->cs_base);
7792 }
7793 break;
7794 default:
7795 goto illegal_op;
14ce26e7 7796 }
2c0262af
FB
7797 }
7798 break;
7799 default:
7800 goto illegal_op;
7801 }
7802 break;
3415a4dd
FB
7803 case 0x108: /* invd */
7804 case 0x109: /* wbinvd */
7805 if (s->cpl != 0) {
7806 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7807 } else {
872929aa 7808 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
3415a4dd
FB
7809 /* nothing to do */
7810 }
7811 break;
14ce26e7
FB
7812 case 0x63: /* arpl or movslS (x86_64) */
7813#ifdef TARGET_X86_64
7814 if (CODE64(s)) {
7815 int d_ot;
7816 /* d_ot is the size of destination */
7817 d_ot = dflag + OT_WORD;
7818
0af10c86 7819 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
7820 reg = ((modrm >> 3) & 7) | rex_r;
7821 mod = (modrm >> 6) & 3;
7822 rm = (modrm & 7) | REX_B(s);
3b46e624 7823
14ce26e7 7824 if (mod == 3) {
57fec1fe 7825 gen_op_mov_TN_reg(OT_LONG, 0, rm);
14ce26e7
FB
7826 /* sign extend */
7827 if (d_ot == OT_QUAD)
e108dd01 7828 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
57fec1fe 7829 gen_op_mov_reg_T0(d_ot, reg);
14ce26e7 7830 } else {
0af10c86 7831 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 7832 if (d_ot == OT_QUAD) {
57fec1fe 7833 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
14ce26e7 7834 } else {
57fec1fe 7835 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
14ce26e7 7836 }
57fec1fe 7837 gen_op_mov_reg_T0(d_ot, reg);
14ce26e7 7838 }
5fafdf24 7839 } else
14ce26e7
FB
7840#endif
7841 {
3bd7da9e 7842 int label1;
49d9fdcc 7843 TCGv t0, t1, t2, a0;
1e4840bf 7844
14ce26e7
FB
7845 if (!s->pe || s->vm86)
7846 goto illegal_op;
a7812ae4
PB
7847 t0 = tcg_temp_local_new();
7848 t1 = tcg_temp_local_new();
7849 t2 = tcg_temp_local_new();
3bd7da9e 7850 ot = OT_WORD;
0af10c86 7851 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
7852 reg = (modrm >> 3) & 7;
7853 mod = (modrm >> 6) & 3;
7854 rm = modrm & 7;
7855 if (mod != 3) {
0af10c86 7856 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
1e4840bf 7857 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
49d9fdcc
LD
7858 a0 = tcg_temp_local_new();
7859 tcg_gen_mov_tl(a0, cpu_A0);
14ce26e7 7860 } else {
1e4840bf 7861 gen_op_mov_v_reg(ot, t0, rm);
49d9fdcc 7862 TCGV_UNUSED(a0);
14ce26e7 7863 }
1e4840bf
FB
7864 gen_op_mov_v_reg(ot, t1, reg);
7865 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7866 tcg_gen_andi_tl(t1, t1, 3);
7867 tcg_gen_movi_tl(t2, 0);
3bd7da9e 7868 label1 = gen_new_label();
1e4840bf
FB
7869 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7870 tcg_gen_andi_tl(t0, t0, ~3);
7871 tcg_gen_or_tl(t0, t0, t1);
7872 tcg_gen_movi_tl(t2, CC_Z);
3bd7da9e 7873 gen_set_label(label1);
14ce26e7 7874 if (mod != 3) {
49d9fdcc
LD
7875 gen_op_st_v(ot + s->mem_index, t0, a0);
7876 tcg_temp_free(a0);
7877 } else {
1e4840bf 7878 gen_op_mov_reg_v(ot, rm, t0);
14ce26e7 7879 }
d229edce 7880 gen_compute_eflags(s);
3bd7da9e 7881 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
1e4840bf 7882 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
1e4840bf
FB
7883 tcg_temp_free(t0);
7884 tcg_temp_free(t1);
7885 tcg_temp_free(t2);
f115e911 7886 }
f115e911 7887 break;
2c0262af
FB
7888 case 0x102: /* lar */
7889 case 0x103: /* lsl */
cec6843e
FB
7890 {
7891 int label1;
1e4840bf 7892 TCGv t0;
cec6843e
FB
7893 if (!s->pe || s->vm86)
7894 goto illegal_op;
7895 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 7896 modrm = cpu_ldub_code(env, s->pc++);
cec6843e 7897 reg = ((modrm >> 3) & 7) | rex_r;
0af10c86 7898 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
a7812ae4 7899 t0 = tcg_temp_local_new();
773cdfcc 7900 gen_update_cc_op(s);
2999a0b2
BS
7901 if (b == 0x102) {
7902 gen_helper_lar(t0, cpu_env, cpu_T[0]);
7903 } else {
7904 gen_helper_lsl(t0, cpu_env, cpu_T[0]);
7905 }
cec6843e
FB
7906 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7907 label1 = gen_new_label();
cb63669a 7908 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1e4840bf 7909 gen_op_mov_reg_v(ot, reg, t0);
cec6843e 7910 gen_set_label(label1);
3ca51d07 7911 set_cc_op(s, CC_OP_EFLAGS);
1e4840bf 7912 tcg_temp_free(t0);
cec6843e 7913 }
2c0262af
FB
7914 break;
7915 case 0x118:
0af10c86 7916 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7917 mod = (modrm >> 6) & 3;
7918 op = (modrm >> 3) & 7;
7919 switch(op) {
7920 case 0: /* prefetchnta */
7921 case 1: /* prefetchnt0 */
7922 case 2: /* prefetchnt0 */
7923 case 3: /* prefetchnt0 */
7924 if (mod == 3)
7925 goto illegal_op;
0af10c86 7926 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
7927 /* nothing more to do */
7928 break;
e17a36ce 7929 default: /* nop (multi byte) */
0af10c86 7930 gen_nop_modrm(env, s, modrm);
e17a36ce 7931 break;
2c0262af
FB
7932 }
7933 break;
e17a36ce 7934 case 0x119 ... 0x11f: /* nop (multi byte) */
0af10c86
BS
7935 modrm = cpu_ldub_code(env, s->pc++);
7936 gen_nop_modrm(env, s, modrm);
e17a36ce 7937 break;
2c0262af
FB
7938 case 0x120: /* mov reg, crN */
7939 case 0x122: /* mov crN, reg */
7940 if (s->cpl != 0) {
7941 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7942 } else {
0af10c86 7943 modrm = cpu_ldub_code(env, s->pc++);
5c73b757
MO
7944 /* Ignore the mod bits (assume (modrm&0xc0)==0xc0).
7945 * AMD documentation (24594.pdf) and testing of
7946 * intel 386 and 486 processors all show that the mod bits
7947 * are assumed to be 1's, regardless of actual values.
7948 */
14ce26e7
FB
7949 rm = (modrm & 7) | REX_B(s);
7950 reg = ((modrm >> 3) & 7) | rex_r;
7951 if (CODE64(s))
7952 ot = OT_QUAD;
7953 else
7954 ot = OT_LONG;
ccd59d09
AP
7955 if ((prefixes & PREFIX_LOCK) && (reg == 0) &&
7956 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) {
7957 reg = 8;
7958 }
2c0262af
FB
7959 switch(reg) {
7960 case 0:
7961 case 2:
7962 case 3:
7963 case 4:
9230e66e 7964 case 8:
773cdfcc 7965 gen_update_cc_op(s);
872929aa 7966 gen_jmp_im(pc_start - s->cs_base);
2c0262af 7967 if (b & 2) {
57fec1fe 7968 gen_op_mov_TN_reg(ot, 0, rm);
4a7443be
BS
7969 gen_helper_write_crN(cpu_env, tcg_const_i32(reg),
7970 cpu_T[0]);
14ce26e7 7971 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
7972 gen_eob(s);
7973 } else {
4a7443be 7974 gen_helper_read_crN(cpu_T[0], cpu_env, tcg_const_i32(reg));
57fec1fe 7975 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
7976 }
7977 break;
7978 default:
7979 goto illegal_op;
7980 }
7981 }
7982 break;
7983 case 0x121: /* mov reg, drN */
7984 case 0x123: /* mov drN, reg */
7985 if (s->cpl != 0) {
7986 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7987 } else {
0af10c86 7988 modrm = cpu_ldub_code(env, s->pc++);
5c73b757
MO
7989 /* Ignore the mod bits (assume (modrm&0xc0)==0xc0).
7990 * AMD documentation (24594.pdf) and testing of
7991 * intel 386 and 486 processors all show that the mod bits
7992 * are assumed to be 1's, regardless of actual values.
7993 */
14ce26e7
FB
7994 rm = (modrm & 7) | REX_B(s);
7995 reg = ((modrm >> 3) & 7) | rex_r;
7996 if (CODE64(s))
7997 ot = OT_QUAD;
7998 else
7999 ot = OT_LONG;
2c0262af 8000 /* XXX: do it dynamically with CR4.DE bit */
14ce26e7 8001 if (reg == 4 || reg == 5 || reg >= 8)
2c0262af
FB
8002 goto illegal_op;
8003 if (b & 2) {
0573fbfc 8004 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
57fec1fe 8005 gen_op_mov_TN_reg(ot, 0, rm);
4a7443be 8006 gen_helper_movl_drN_T0(cpu_env, tcg_const_i32(reg), cpu_T[0]);
14ce26e7 8007 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
8008 gen_eob(s);
8009 } else {
0573fbfc 8010 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
651ba608 8011 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
57fec1fe 8012 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
8013 }
8014 }
8015 break;
8016 case 0x106: /* clts */
8017 if (s->cpl != 0) {
8018 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8019 } else {
0573fbfc 8020 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
f0967a1a 8021 gen_helper_clts(cpu_env);
7eee2a50 8022 /* abort block because static cpu state changed */
14ce26e7 8023 gen_jmp_im(s->pc - s->cs_base);
7eee2a50 8024 gen_eob(s);
2c0262af
FB
8025 }
8026 break;
222a3336 8027 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
664e0f19
FB
8028 case 0x1c3: /* MOVNTI reg, mem */
8029 if (!(s->cpuid_features & CPUID_SSE2))
14ce26e7 8030 goto illegal_op;
664e0f19 8031 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
0af10c86 8032 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
8033 mod = (modrm >> 6) & 3;
8034 if (mod == 3)
8035 goto illegal_op;
8036 reg = ((modrm >> 3) & 7) | rex_r;
8037 /* generate a generic store */
0af10c86 8038 gen_ldst_modrm(env, s, modrm, ot, reg, 1);
14ce26e7 8039 break;
664e0f19 8040 case 0x1ae:
0af10c86 8041 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
8042 mod = (modrm >> 6) & 3;
8043 op = (modrm >> 3) & 7;
8044 switch(op) {
8045 case 0: /* fxsave */
5fafdf24 8046 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
09d85fb8 8047 (s->prefix & PREFIX_LOCK))
14ce26e7 8048 goto illegal_op;
09d85fb8 8049 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
0fd14b72
FB
8050 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8051 break;
8052 }
0af10c86 8053 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
773cdfcc 8054 gen_update_cc_op(s);
19e6c4b8 8055 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 8056 gen_helper_fxsave(cpu_env, cpu_A0, tcg_const_i32((s->dflag == 2)));
664e0f19
FB
8057 break;
8058 case 1: /* fxrstor */
5fafdf24 8059 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
09d85fb8 8060 (s->prefix & PREFIX_LOCK))
14ce26e7 8061 goto illegal_op;
09d85fb8 8062 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
0fd14b72
FB
8063 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8064 break;
8065 }
0af10c86 8066 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
773cdfcc 8067 gen_update_cc_op(s);
19e6c4b8 8068 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
8069 gen_helper_fxrstor(cpu_env, cpu_A0,
8070 tcg_const_i32((s->dflag == 2)));
664e0f19
FB
8071 break;
8072 case 2: /* ldmxcsr */
8073 case 3: /* stmxcsr */
8074 if (s->flags & HF_TS_MASK) {
8075 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8076 break;
14ce26e7 8077 }
664e0f19
FB
8078 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8079 mod == 3)
14ce26e7 8080 goto illegal_op;
0af10c86 8081 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 8082 if (op == 2) {
57fec1fe 8083 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
20f8bd48 8084 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 8085 gen_helper_ldmxcsr(cpu_env, cpu_tmp2_i32);
14ce26e7 8086 } else {
651ba608 8087 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
57fec1fe 8088 gen_op_st_T0_A0(OT_LONG + s->mem_index);
14ce26e7 8089 }
664e0f19
FB
8090 break;
8091 case 5: /* lfence */
8092 case 6: /* mfence */
8001c294 8093 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE2))
664e0f19
FB
8094 goto illegal_op;
8095 break;
8f091a59
FB
8096 case 7: /* sfence / clflush */
8097 if ((modrm & 0xc7) == 0xc0) {
8098 /* sfence */
a35f3ec7 8099 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8f091a59
FB
8100 if (!(s->cpuid_features & CPUID_SSE))
8101 goto illegal_op;
8102 } else {
8103 /* clflush */
8104 if (!(s->cpuid_features & CPUID_CLFLUSH))
8105 goto illegal_op;
0af10c86 8106 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8f091a59
FB
8107 }
8108 break;
664e0f19 8109 default:
14ce26e7
FB
8110 goto illegal_op;
8111 }
8112 break;
a35f3ec7 8113 case 0x10d: /* 3DNow! prefetch(w) */
0af10c86 8114 modrm = cpu_ldub_code(env, s->pc++);
a35f3ec7
AJ
8115 mod = (modrm >> 6) & 3;
8116 if (mod == 3)
8117 goto illegal_op;
0af10c86 8118 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8f091a59
FB
8119 /* ignore for now */
8120 break;
3b21e03e 8121 case 0x1aa: /* rsm */
872929aa 8122 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
3b21e03e
FB
8123 if (!(s->flags & HF_SMM_MASK))
8124 goto illegal_op;
728d803b 8125 gen_update_cc_op(s);
3b21e03e 8126 gen_jmp_im(s->pc - s->cs_base);
608badfc 8127 gen_helper_rsm(cpu_env);
3b21e03e
FB
8128 gen_eob(s);
8129 break;
222a3336
AZ
8130 case 0x1b8: /* SSE4.2 popcnt */
8131 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8132 PREFIX_REPZ)
8133 goto illegal_op;
8134 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8135 goto illegal_op;
8136
0af10c86 8137 modrm = cpu_ldub_code(env, s->pc++);
8b4a3df8 8138 reg = ((modrm >> 3) & 7) | rex_r;
222a3336
AZ
8139
8140 if (s->prefix & PREFIX_DATA)
8141 ot = OT_WORD;
8142 else if (s->dflag != 2)
8143 ot = OT_LONG;
8144 else
8145 ot = OT_QUAD;
8146
0af10c86 8147 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
d3eb5eae 8148 gen_helper_popcnt(cpu_T[0], cpu_env, cpu_T[0], tcg_const_i32(ot));
222a3336 8149 gen_op_mov_reg_T0(ot, reg);
fdb0d09d 8150
3ca51d07 8151 set_cc_op(s, CC_OP_EFLAGS);
222a3336 8152 break;
a35f3ec7
AJ
8153 case 0x10e ... 0x10f:
8154 /* 3DNow! instructions, ignore prefixes */
8155 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
664e0f19
FB
8156 case 0x110 ... 0x117:
8157 case 0x128 ... 0x12f:
4242b1bd 8158 case 0x138 ... 0x13a:
d9f4bb27 8159 case 0x150 ... 0x179:
664e0f19
FB
8160 case 0x17c ... 0x17f:
8161 case 0x1c2:
8162 case 0x1c4 ... 0x1c6:
8163 case 0x1d0 ... 0x1fe:
0af10c86 8164 gen_sse(env, s, b, pc_start, rex_r);
664e0f19 8165 break;
2c0262af
FB
8166 default:
8167 goto illegal_op;
8168 }
8169 /* lock generation */
8170 if (s->prefix & PREFIX_LOCK)
a7812ae4 8171 gen_helper_unlock();
2c0262af
FB
8172 return s->pc;
8173 illegal_op:
ab1f142b 8174 if (s->prefix & PREFIX_LOCK)
a7812ae4 8175 gen_helper_unlock();
2c0262af
FB
8176 /* XXX: ensure that no lock was generated */
8177 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8178 return s->pc;
8179}
8180
2c0262af
FB
8181void optimize_flags_init(void)
8182{
a7812ae4
PB
8183 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8184 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8185 offsetof(CPUX86State, cc_op), "cc_op");
317ac620 8186 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_dst),
a7812ae4 8187 "cc_dst");
a3251186
RH
8188 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src),
8189 "cc_src");
988c3eb0
RH
8190 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src2),
8191 "cc_src2");
437a88a5 8192
cc739bb0
LD
8193#ifdef TARGET_X86_64
8194 cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8195 offsetof(CPUX86State, regs[R_EAX]), "rax");
cc739bb0 8196 cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8197 offsetof(CPUX86State, regs[R_ECX]), "rcx");
cc739bb0 8198 cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8199 offsetof(CPUX86State, regs[R_EDX]), "rdx");
cc739bb0 8200 cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8201 offsetof(CPUX86State, regs[R_EBX]), "rbx");
cc739bb0 8202 cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8203 offsetof(CPUX86State, regs[R_ESP]), "rsp");
cc739bb0 8204 cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8205 offsetof(CPUX86State, regs[R_EBP]), "rbp");
cc739bb0 8206 cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8207 offsetof(CPUX86State, regs[R_ESI]), "rsi");
cc739bb0 8208 cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8209 offsetof(CPUX86State, regs[R_EDI]), "rdi");
cc739bb0 8210 cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8211 offsetof(CPUX86State, regs[8]), "r8");
cc739bb0 8212 cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8213 offsetof(CPUX86State, regs[9]), "r9");
cc739bb0 8214 cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8215 offsetof(CPUX86State, regs[10]), "r10");
cc739bb0 8216 cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8217 offsetof(CPUX86State, regs[11]), "r11");
cc739bb0 8218 cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8219 offsetof(CPUX86State, regs[12]), "r12");
cc739bb0 8220 cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8221 offsetof(CPUX86State, regs[13]), "r13");
cc739bb0 8222 cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8223 offsetof(CPUX86State, regs[14]), "r14");
cc739bb0 8224 cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8225 offsetof(CPUX86State, regs[15]), "r15");
cc739bb0
LD
8226#else
8227 cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8228 offsetof(CPUX86State, regs[R_EAX]), "eax");
cc739bb0 8229 cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8230 offsetof(CPUX86State, regs[R_ECX]), "ecx");
cc739bb0 8231 cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8232 offsetof(CPUX86State, regs[R_EDX]), "edx");
cc739bb0 8233 cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8234 offsetof(CPUX86State, regs[R_EBX]), "ebx");
cc739bb0 8235 cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8236 offsetof(CPUX86State, regs[R_ESP]), "esp");
cc739bb0 8237 cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8238 offsetof(CPUX86State, regs[R_EBP]), "ebp");
cc739bb0 8239 cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8240 offsetof(CPUX86State, regs[R_ESI]), "esi");
cc739bb0 8241 cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8242 offsetof(CPUX86State, regs[R_EDI]), "edi");
cc739bb0
LD
8243#endif
8244
437a88a5 8245 /* register helpers */
a7812ae4 8246#define GEN_HELPER 2
437a88a5 8247#include "helper.h"
2c0262af
FB
8248}
8249
8250/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8251 basic block 'tb'. If search_pc is TRUE, also generate PC
8252 information for each intermediate instruction. */
317ac620 8253static inline void gen_intermediate_code_internal(CPUX86State *env,
2cfc5f17
TS
8254 TranslationBlock *tb,
8255 int search_pc)
2c0262af
FB
8256{
8257 DisasContext dc1, *dc = &dc1;
14ce26e7 8258 target_ulong pc_ptr;
2c0262af 8259 uint16_t *gen_opc_end;
a1d1bb31 8260 CPUBreakpoint *bp;
7f5b7d3e 8261 int j, lj;
c068688b 8262 uint64_t flags;
14ce26e7
FB
8263 target_ulong pc_start;
8264 target_ulong cs_base;
2e70f6ef
PB
8265 int num_insns;
8266 int max_insns;
3b46e624 8267
2c0262af 8268 /* generate intermediate code */
14ce26e7
FB
8269 pc_start = tb->pc;
8270 cs_base = tb->cs_base;
2c0262af 8271 flags = tb->flags;
3a1d9b8b 8272
4f31916f 8273 dc->pe = (flags >> HF_PE_SHIFT) & 1;
2c0262af
FB
8274 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8275 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8276 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8277 dc->f_st = 0;
8278 dc->vm86 = (flags >> VM_SHIFT) & 1;
8279 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8280 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8281 dc->tf = (flags >> TF_SHIFT) & 1;
34865134 8282 dc->singlestep_enabled = env->singlestep_enabled;
2c0262af 8283 dc->cc_op = CC_OP_DYNAMIC;
e207582f 8284 dc->cc_op_dirty = false;
2c0262af
FB
8285 dc->cs_base = cs_base;
8286 dc->tb = tb;
8287 dc->popl_esp_hack = 0;
8288 /* select memory access functions */
8289 dc->mem_index = 0;
8290 if (flags & HF_SOFTMMU_MASK) {
a9321a4d 8291 dc->mem_index = (cpu_mmu_index(env) + 1) << 2;
2c0262af 8292 }
14ce26e7 8293 dc->cpuid_features = env->cpuid_features;
3d7374c5 8294 dc->cpuid_ext_features = env->cpuid_ext_features;
e771edab 8295 dc->cpuid_ext2_features = env->cpuid_ext2_features;
12e26b75 8296 dc->cpuid_ext3_features = env->cpuid_ext3_features;
a9321a4d 8297 dc->cpuid_7_0_ebx_features = env->cpuid_7_0_ebx_features;
14ce26e7
FB
8298#ifdef TARGET_X86_64
8299 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8300 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8301#endif
7eee2a50 8302 dc->flags = flags;
a2cc3b24
FB
8303 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8304 (flags & HF_INHIBIT_IRQ_MASK)
415fa2ea 8305#ifndef CONFIG_SOFTMMU
2c0262af
FB
8306 || (flags & HF_SOFTMMU_MASK)
8307#endif
8308 );
4f31916f
FB
8309#if 0
8310 /* check addseg logic */
dc196a57 8311 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
4f31916f
FB
8312 printf("ERROR addseg\n");
8313#endif
8314
a7812ae4
PB
8315 cpu_T[0] = tcg_temp_new();
8316 cpu_T[1] = tcg_temp_new();
8317 cpu_A0 = tcg_temp_new();
a7812ae4
PB
8318
8319 cpu_tmp0 = tcg_temp_new();
8320 cpu_tmp1_i64 = tcg_temp_new_i64();
8321 cpu_tmp2_i32 = tcg_temp_new_i32();
8322 cpu_tmp3_i32 = tcg_temp_new_i32();
8323 cpu_tmp4 = tcg_temp_new();
a7812ae4
PB
8324 cpu_ptr0 = tcg_temp_new_ptr();
8325 cpu_ptr1 = tcg_temp_new_ptr();
a3251186 8326 cpu_cc_srcT = tcg_temp_local_new();
57fec1fe 8327
92414b31 8328 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
2c0262af
FB
8329
8330 dc->is_jmp = DISAS_NEXT;
8331 pc_ptr = pc_start;
8332 lj = -1;
2e70f6ef
PB
8333 num_insns = 0;
8334 max_insns = tb->cflags & CF_COUNT_MASK;
8335 if (max_insns == 0)
8336 max_insns = CF_COUNT_MASK;
2c0262af 8337
806f352d 8338 gen_tb_start();
2c0262af 8339 for(;;) {
72cf2d4f
BS
8340 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
8341 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
a2397807
JK
8342 if (bp->pc == pc_ptr &&
8343 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
2c0262af
FB
8344 gen_debug(dc, pc_ptr - dc->cs_base);
8345 break;
8346 }
8347 }
8348 }
8349 if (search_pc) {
92414b31 8350 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
2c0262af
FB
8351 if (lj < j) {
8352 lj++;
8353 while (lj < j)
ab1103de 8354 tcg_ctx.gen_opc_instr_start[lj++] = 0;
2c0262af 8355 }
25983cad 8356 tcg_ctx.gen_opc_pc[lj] = pc_ptr;
2c0262af 8357 gen_opc_cc_op[lj] = dc->cc_op;
ab1103de 8358 tcg_ctx.gen_opc_instr_start[lj] = 1;
c9c99c22 8359 tcg_ctx.gen_opc_icount[lj] = num_insns;
2c0262af 8360 }
2e70f6ef
PB
8361 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8362 gen_io_start();
8363
0af10c86 8364 pc_ptr = disas_insn(env, dc, pc_ptr);
2e70f6ef 8365 num_insns++;
2c0262af
FB
8366 /* stop translation if indicated */
8367 if (dc->is_jmp)
8368 break;
8369 /* if single step mode, we generate only one instruction and
8370 generate an exception */
a2cc3b24
FB
8371 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8372 the flag and abort the translation to give the irqs a
8373 change to be happen */
5fafdf24 8374 if (dc->tf || dc->singlestep_enabled ||
2e70f6ef 8375 (flags & HF_INHIBIT_IRQ_MASK)) {
14ce26e7 8376 gen_jmp_im(pc_ptr - dc->cs_base);
2c0262af
FB
8377 gen_eob(dc);
8378 break;
8379 }
8380 /* if too long translation, stop generation too */
efd7f486 8381 if (tcg_ctx.gen_opc_ptr >= gen_opc_end ||
2e70f6ef
PB
8382 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8383 num_insns >= max_insns) {
14ce26e7 8384 gen_jmp_im(pc_ptr - dc->cs_base);
2c0262af
FB
8385 gen_eob(dc);
8386 break;
8387 }
1b530a6d
AJ
8388 if (singlestep) {
8389 gen_jmp_im(pc_ptr - dc->cs_base);
8390 gen_eob(dc);
8391 break;
8392 }
2c0262af 8393 }
2e70f6ef
PB
8394 if (tb->cflags & CF_LAST_IO)
8395 gen_io_end();
806f352d 8396 gen_tb_end(tb, num_insns);
efd7f486 8397 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
2c0262af
FB
8398 /* we don't forget to fill the last values */
8399 if (search_pc) {
92414b31 8400 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
2c0262af
FB
8401 lj++;
8402 while (lj <= j)
ab1103de 8403 tcg_ctx.gen_opc_instr_start[lj++] = 0;
2c0262af 8404 }
3b46e624 8405
2c0262af 8406#ifdef DEBUG_DISAS
8fec2b8c 8407 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
14ce26e7 8408 int disas_flags;
93fcfe39
AL
8409 qemu_log("----------------\n");
8410 qemu_log("IN: %s\n", lookup_symbol(pc_start));
14ce26e7
FB
8411#ifdef TARGET_X86_64
8412 if (dc->code64)
8413 disas_flags = 2;
8414 else
8415#endif
8416 disas_flags = !dc->code32;
f4359b9f 8417 log_target_disas(env, pc_start, pc_ptr - pc_start, disas_flags);
93fcfe39 8418 qemu_log("\n");
2c0262af
FB
8419 }
8420#endif
8421
2e70f6ef 8422 if (!search_pc) {
2c0262af 8423 tb->size = pc_ptr - pc_start;
2e70f6ef
PB
8424 tb->icount = num_insns;
8425 }
2c0262af
FB
8426}
8427
317ac620 8428void gen_intermediate_code(CPUX86State *env, TranslationBlock *tb)
2c0262af 8429{
2cfc5f17 8430 gen_intermediate_code_internal(env, tb, 0);
2c0262af
FB
8431}
8432
317ac620 8433void gen_intermediate_code_pc(CPUX86State *env, TranslationBlock *tb)
2c0262af 8434{
2cfc5f17 8435 gen_intermediate_code_internal(env, tb, 1);
2c0262af
FB
8436}
8437
317ac620 8438void restore_state_to_opc(CPUX86State *env, TranslationBlock *tb, int pc_pos)
d2856f1a
AJ
8439{
8440 int cc_op;
8441#ifdef DEBUG_DISAS
8fec2b8c 8442 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
d2856f1a 8443 int i;
93fcfe39 8444 qemu_log("RESTORE:\n");
d2856f1a 8445 for(i = 0;i <= pc_pos; i++) {
ab1103de 8446 if (tcg_ctx.gen_opc_instr_start[i]) {
25983cad
EV
8447 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i,
8448 tcg_ctx.gen_opc_pc[i]);
d2856f1a
AJ
8449 }
8450 }
e87b7cb0 8451 qemu_log("pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
25983cad 8452 pc_pos, tcg_ctx.gen_opc_pc[pc_pos] - tb->cs_base,
d2856f1a
AJ
8453 (uint32_t)tb->cs_base);
8454 }
8455#endif
25983cad 8456 env->eip = tcg_ctx.gen_opc_pc[pc_pos] - tb->cs_base;
d2856f1a
AJ
8457 cc_op = gen_opc_cc_op[pc_pos];
8458 if (cc_op != CC_OP_DYNAMIC)
8459 env->cc_op = cc_op;
8460}