/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>

#include "qemu/host-utils.h"
#include "cpu.h"
#include "disas/disas.h"
#include "tcg-op.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#define PREFIX_VEX    0x20

#ifdef TARGET_X86_64
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
#else
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
# define ctztl  ctz64
# define clztl  clz64
#else
# define ctztl  ctz32
# define clztl  clz32
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_A0;
static TCGv cpu_cc_dst, cpu_cc_src, cpu_cc_src2, cpu_cc_srcT;
static TCGv_i32 cpu_cc_op;
static TCGv cpu_regs[CPU_NB_REGS];
/* local temps */
static TCGv cpu_T[2];
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp4;
static TCGv_ptr cpu_ptr0, cpu_ptr1;
static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;

static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];

#include "exec/gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int vex_l;  /* vex vector length */
    int vex_v;  /* vex vvvv register, without 1's complement.  */
    int ss32;   /* 32 bit stack segment */
    CCOp cc_op;  /* current CC operation */
    bool cc_op_dirty;
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
    int cpuid_7_0_ebx_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
static void gen_op(DisasContext *s1, int op, int ot, int d);

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

enum {
    USES_CC_DST  = 1,
    USES_CC_SRC  = 2,
    USES_CC_SRC2 = 4,
    USES_CC_SRCT = 8,
};

/* Bit set if the global variable is live after setting CC_OP to X.  */
static const uint8_t cc_op_live[CC_OP_NB] = {
    [CC_OP_DYNAMIC] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
    [CC_OP_EFLAGS] = USES_CC_SRC,
    [CC_OP_MULB ... CC_OP_MULQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_ADDB ... CC_OP_ADDQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_ADCB ... CC_OP_ADCQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
    [CC_OP_SUBB ... CC_OP_SUBQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRCT,
    [CC_OP_SBBB ... CC_OP_SBBQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
    [CC_OP_LOGICB ... CC_OP_LOGICQ] = USES_CC_DST,
    [CC_OP_INCB ... CC_OP_INCQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_DECB ... CC_OP_DECQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_SHLB ... CC_OP_SHLQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_SARB ... CC_OP_SARQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_BMILGB ... CC_OP_BMILGQ] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_ADCX] = USES_CC_DST | USES_CC_SRC,
    [CC_OP_ADOX] = USES_CC_SRC | USES_CC_SRC2,
    [CC_OP_ADCOX] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
    [CC_OP_CLR] = 0,
};

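/* Switch the lazy condition-code state to OP.  Any of cpu_cc_dst/src/src2/
   srcT that the new operation does not use (per cc_op_live) is discarded so
   that TCG can eliminate the code that computed it.  */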
static void set_cc_op(DisasContext *s, CCOp op)
{
    int dead;

    if (s->cc_op == op) {
        return;
    }

    /* Discard CC computation that will no longer be used.  */
    dead = cc_op_live[s->cc_op] & ~cc_op_live[op];
    if (dead & USES_CC_DST) {
        tcg_gen_discard_tl(cpu_cc_dst);
    }
    if (dead & USES_CC_SRC) {
        tcg_gen_discard_tl(cpu_cc_src);
    }
    if (dead & USES_CC_SRC2) {
        tcg_gen_discard_tl(cpu_cc_src2);
    }
    if (dead & USES_CC_SRCT) {
        tcg_gen_discard_tl(cpu_cc_srcT);
    }

    if (op == CC_OP_DYNAMIC) {
        /* The DYNAMIC setting is translator only, and should never be
           stored.  Thus we always consider it clean.  */
        s->cc_op_dirty = false;
    } else {
        /* Discard any computed CC_OP value (see shifts).  */
        if (s->cc_op == CC_OP_DYNAMIC) {
            tcg_gen_discard_i32(cpu_cc_op);
        }
        s->cc_op_dirty = true;
    }
    s->cc_op = op;
}

static void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op_dirty) {
        tcg_gen_movi_i32(cpu_cc_op, s->cc_op);
        s->cc_op_dirty = false;
    }
}

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(HOST_WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

/* In instruction encodings for byte register accesses the
 * register number usually indicates "low 8 bits of register N";
 * however there are some special cases where N 4..7 indicates
 * [AH, CH, DH, BH], ie "bits 15..8 of register N-4". Return
 * true for this special case, false otherwise.
 */
static inline bool byte_reg_is_xH(int reg)
{
    if (reg < 4) {
        return false;
    }
#ifdef TARGET_X86_64
    if (reg >= 8 || x86_64_hregs) {
        return false;
    }
#endif
    return true;
}

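/* Write T0 into general register REG with operand size OT.  Byte writes go
   through byte_reg_is_xH() to handle the AH/CH/DH/BH encodings; 32-bit
   writes zero-extend into the full 64-bit register.  */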
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (!byte_reg_is_xH(reg)) {
            tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 8);
        } else {
            tcg_gen_deposit_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], t0, 8, 8);
        }
        break;
    case OT_WORD:
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 16);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case OT_LONG:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], t0);
        break;
#ifdef TARGET_X86_64
    case OT_QUAD:
        tcg_gen_mov_tl(cpu_regs[reg], t0);
        break;
#endif
    }
}

static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case OT_BYTE:
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_A0, 0, 16);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case OT_WORD:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
        break;
#endif
    }
}

static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    if (ot == OT_BYTE && byte_reg_is_xH(reg)) {
        tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
        tcg_gen_ext8u_tl(t0, t0);
    } else {
        tcg_gen_mov_tl(t0, cpu_regs[reg]);
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, eip));
}

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case OT_BYTE:
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
        break;
    case OT_WORD:
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a nop. */
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case OT_BYTE:
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
        break;
    case OT_WORD:
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a nop. */
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
        break;
#endif
    }
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    /* For x86_64, this sets the higher half of register to zero.
       For i386, this is equivalent to a nop. */
    tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(DisasContext *s, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    } else {
        tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
        tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
    }
#else
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case OT_BYTE:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case OT_WORD:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case OT_LONG:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case OT_BYTE:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case OT_WORD:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case OT_LONG:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case OT_QUAD:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_ld64(t0, a0, mem_index);
#endif
        break;
    }
}

/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case OT_BYTE:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case OT_WORD:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case OT_LONG:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case OT_QUAD:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_st64(t0, a0, mem_index);
#endif
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, eip));
}

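/* Compute the string-source address into A0: the ESI index plus the DS (or
   override) segment base, truncated to the current address size.  */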
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(s, override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(s, R_ES);
    }
}

static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
};

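/* Sign- or zero-extend SRC to operand size SIZE.  The result is written to
   DST and returned, except when no extension is needed, in which case SRC
   itself is returned unmodified.  */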
static TCGv gen_ext_tl(TCGv dst, TCGv src, int size, bool sign)
{
    switch (size) {
    case OT_BYTE:
        if (sign) {
            tcg_gen_ext8s_tl(dst, src);
        } else {
            tcg_gen_ext8u_tl(dst, src);
        }
        return dst;
    case OT_WORD:
        if (sign) {
            tcg_gen_ext16s_tl(dst, src);
        } else {
            tcg_gen_ext16u_tl(dst, src);
        }
        return dst;
#ifdef TARGET_X86_64
    case OT_LONG:
        if (sign) {
            tcg_gen_ext32s_tl(dst, src);
        } else {
            tcg_gen_ext32u_tl(dst, src);
        }
        return dst;
#endif
    default:
        return src;
    }
}

static void gen_extu(int ot, TCGv reg)
{
    gen_ext_tl(reg, reg, ot, false);
}

static void gen_exts(int ot, TCGv reg)
{
    gen_ext_tl(reg, reg, ot, true);
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
{
    switch (ot) {
    case OT_BYTE:
        gen_helper_inb(v, n);
        break;
    case OT_WORD:
        gen_helper_inw(v, n);
        break;
    case OT_LONG:
        gen_helper_inl(v, n);
        break;
    }
}

static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
{
    switch (ot) {
    case OT_BYTE:
        gen_helper_outb(v, n);
        break;
    case OT_WORD:
        gen_helper_outw(v, n);
        break;
    case OT_LONG:
        gen_helper_outl(v, n);
        break;
    }
}

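/* Generate the permission checks required before an I/O access: the TSS
   I/O bitmap check when CPL > IOPL or in vm86 mode, and the SVM IOIO
   intercept check when HF_SVMI_MASK is set.  EIP is written back first so
   that any fault is raised with the correct instruction pointer.  */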
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        gen_update_cc_op(s);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        switch (ot) {
        case OT_BYTE:
            gen_helper_check_iob(cpu_env, cpu_tmp2_i32);
            break;
        case OT_WORD:
            gen_helper_check_iow(cpu_env, cpu_tmp2_i32);
            break;
        case OT_LONG:
            gen_helper_check_iol(cpu_env, cpu_tmp2_i32);
            break;
        }
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            gen_update_cc_op(s);
            gen_jmp_im(cur_eip);
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_svm_check_io(cpu_env, cpu_tmp2_i32,
                                tcg_const_i32(svm_flags),
                                tcg_const_i32(next_eip - cur_eip));
    }
}

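/* String instruction bodies: each transfers one element of size OT and
   then steps ESI and/or EDI by the direction-flag-scaled element size.  */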
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static void gen_op_update1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update3_cc(TCGv reg)
{
    tcg_gen_mov_tl(cpu_cc_src2, reg);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_movi_tl(cpu_cc_srcT, 0);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(DisasContext *s)
{
    TCGv zero, dst, src1, src2;
    int live, dead;

    if (s->cc_op == CC_OP_EFLAGS) {
        return;
    }
    if (s->cc_op == CC_OP_CLR) {
        tcg_gen_movi_tl(cpu_cc_src, CC_Z);
        set_cc_op(s, CC_OP_EFLAGS);
        return;
    }

    TCGV_UNUSED(zero);
    dst = cpu_cc_dst;
    src1 = cpu_cc_src;
    src2 = cpu_cc_src2;

    /* Take care to not read values that are not live.  */
    live = cc_op_live[s->cc_op] & ~USES_CC_SRCT;
    dead = live ^ (USES_CC_DST | USES_CC_SRC | USES_CC_SRC2);
    if (dead) {
        zero = tcg_const_tl(0);
        if (dead & USES_CC_DST) {
            dst = zero;
        }
        if (dead & USES_CC_SRC) {
            src1 = zero;
        }
        if (dead & USES_CC_SRC2) {
            src2 = zero;
        }
    }

    gen_update_cc_op(s);
    gen_helper_cc_compute_all(cpu_cc_src, dst, src1, src2, cpu_cc_op);
    set_cc_op(s, CC_OP_EFLAGS);

    if (dead) {
        tcg_temp_free(zero);
    }
}

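/* A condition that has been prepared but not yet materialized: condition
   COND applied to REG (after masking with MASK) against either REG2 (when
   USE_REG2 is set) or the immediate IMM.  NO_SETCOND means REG already
   holds the result and no setcond is required.  */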
typedef struct CCPrepare {
    TCGCond cond;
    TCGv reg;
    TCGv reg2;
    target_ulong imm;
    target_ulong mask;
    bool use_reg2;
    bool no_setcond;
} CCPrepare;

/* compute eflags.C to reg */
static CCPrepare gen_prepare_eflags_c(DisasContext *s, TCGv reg)
{
    TCGv t0, t1;
    int size, shift;

    switch (s->cc_op) {
    case CC_OP_SUBB ... CC_OP_SUBQ:
        /* (DATA_TYPE)CC_SRCT < (DATA_TYPE)CC_SRC */
        size = s->cc_op - CC_OP_SUBB;
        t1 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
        /* If no temporary was used, be careful not to alias t1 and t0.  */
        t0 = TCGV_EQUAL(t1, cpu_cc_src) ? cpu_tmp0 : reg;
        tcg_gen_mov_tl(t0, cpu_cc_srcT);
        gen_extu(size, t0);
        goto add_sub;

    case CC_OP_ADDB ... CC_OP_ADDQ:
        /* (DATA_TYPE)CC_DST < (DATA_TYPE)CC_SRC */
        size = s->cc_op - CC_OP_ADDB;
        t1 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
        t0 = gen_ext_tl(reg, cpu_cc_dst, size, false);
    add_sub:
        return (CCPrepare) { .cond = TCG_COND_LTU, .reg = t0,
                             .reg2 = t1, .mask = -1, .use_reg2 = true };

    case CC_OP_LOGICB ... CC_OP_LOGICQ:
    case CC_OP_CLR:
        return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };

    case CC_OP_INCB ... CC_OP_INCQ:
    case CC_OP_DECB ... CC_OP_DECQ:
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                             .mask = -1, .no_setcond = true };

    case CC_OP_SHLB ... CC_OP_SHLQ:
        /* (CC_SRC >> (DATA_BITS - 1)) & 1 */
        size = s->cc_op - CC_OP_SHLB;
        shift = (8 << size) - 1;
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                             .mask = (target_ulong)1 << shift };

    case CC_OP_MULB ... CC_OP_MULQ:
        return (CCPrepare) { .cond = TCG_COND_NE,
                             .reg = cpu_cc_src, .mask = -1 };

    case CC_OP_BMILGB ... CC_OP_BMILGQ:
        size = s->cc_op - CC_OP_BMILGB;
        t0 = gen_ext_tl(reg, cpu_cc_src, size, false);
        return (CCPrepare) { .cond = TCG_COND_EQ, .reg = t0, .mask = -1 };

    case CC_OP_ADCX:
    case CC_OP_ADCOX:
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_dst,
                             .mask = -1, .no_setcond = true };

    case CC_OP_EFLAGS:
    case CC_OP_SARB ... CC_OP_SARQ:
        /* CC_SRC & 1 */
        return (CCPrepare) { .cond = TCG_COND_NE,
                             .reg = cpu_cc_src, .mask = CC_C };

    default:
        /* The need to compute only C from CC_OP_DYNAMIC is important
           in efficiently implementing e.g. INC at the start of a TB.  */
        gen_update_cc_op(s);
        gen_helper_cc_compute_c(reg, cpu_cc_dst, cpu_cc_src,
                                cpu_cc_src2, cpu_cc_op);
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
                             .mask = -1, .no_setcond = true };
    }
}

/* compute eflags.P to reg */
static CCPrepare gen_prepare_eflags_p(DisasContext *s, TCGv reg)
{
    gen_compute_eflags(s);
    return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                         .mask = CC_P };
}

/* compute eflags.S to reg */
static CCPrepare gen_prepare_eflags_s(DisasContext *s, TCGv reg)
{
    switch (s->cc_op) {
    case CC_OP_DYNAMIC:
        gen_compute_eflags(s);
        /* FALLTHRU */
    case CC_OP_EFLAGS:
    case CC_OP_ADCX:
    case CC_OP_ADOX:
    case CC_OP_ADCOX:
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                             .mask = CC_S };
    case CC_OP_CLR:
        return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
    default:
        {
            int size = (s->cc_op - CC_OP_ADDB) & 3;
            TCGv t0 = gen_ext_tl(reg, cpu_cc_dst, size, true);
            return (CCPrepare) { .cond = TCG_COND_LT, .reg = t0, .mask = -1 };
        }
    }
}

/* compute eflags.O to reg */
static CCPrepare gen_prepare_eflags_o(DisasContext *s, TCGv reg)
{
    switch (s->cc_op) {
    case CC_OP_ADOX:
    case CC_OP_ADCOX:
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src2,
                             .mask = -1, .no_setcond = true };
    case CC_OP_CLR:
        return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
    default:
        gen_compute_eflags(s);
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                             .mask = CC_O };
    }
}

/* compute eflags.Z to reg */
static CCPrepare gen_prepare_eflags_z(DisasContext *s, TCGv reg)
{
    switch (s->cc_op) {
    case CC_OP_DYNAMIC:
        gen_compute_eflags(s);
        /* FALLTHRU */
    case CC_OP_EFLAGS:
    case CC_OP_ADCX:
    case CC_OP_ADOX:
    case CC_OP_ADCOX:
        return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                             .mask = CC_Z };
    case CC_OP_CLR:
        return (CCPrepare) { .cond = TCG_COND_ALWAYS, .mask = -1 };
    default:
        {
            int size = (s->cc_op - CC_OP_ADDB) & 3;
            TCGv t0 = gen_ext_tl(reg, cpu_cc_dst, size, false);
            return (CCPrepare) { .cond = TCG_COND_EQ, .reg = t0, .mask = -1 };
        }
    }
}

/* perform a conditional store into register 'reg' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static CCPrepare gen_prepare_cc(DisasContext *s, int b, TCGv reg)
{
    int inv, jcc_op, size, cond;
    CCPrepare cc;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch (s->cc_op) {
    case CC_OP_SUBB ... CC_OP_SUBQ:
        /* We optimize relational operators for the cmp/jcc case.  */
        size = s->cc_op - CC_OP_SUBB;
        switch (jcc_op) {
        case JCC_BE:
            tcg_gen_mov_tl(cpu_tmp4, cpu_cc_srcT);
            gen_extu(size, cpu_tmp4);
            t0 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
            cc = (CCPrepare) { .cond = TCG_COND_LEU, .reg = cpu_tmp4,
                               .reg2 = t0, .mask = -1, .use_reg2 = true };
            break;

        case JCC_L:
            cond = TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_mov_tl(cpu_tmp4, cpu_cc_srcT);
            gen_exts(size, cpu_tmp4);
            t0 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, true);
            cc = (CCPrepare) { .cond = cond, .reg = cpu_tmp4,
                               .reg2 = t0, .mask = -1, .use_reg2 = true };
            break;

        default:
            goto slow_jcc;
        }
        break;

    default:
    slow_jcc:
        /* This actually generates good code for JC, JZ and JS.  */
        switch (jcc_op) {
        case JCC_O:
            cc = gen_prepare_eflags_o(s, reg);
            break;
        case JCC_B:
            cc = gen_prepare_eflags_c(s, reg);
            break;
        case JCC_Z:
            cc = gen_prepare_eflags_z(s, reg);
            break;
        case JCC_BE:
            gen_compute_eflags(s);
            cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
                               .mask = CC_Z | CC_C };
            break;
        case JCC_S:
            cc = gen_prepare_eflags_s(s, reg);
            break;
        case JCC_P:
            cc = gen_prepare_eflags_p(s, reg);
            break;
        case JCC_L:
            gen_compute_eflags(s);
            if (TCGV_EQUAL(reg, cpu_cc_src)) {
                reg = cpu_tmp0;
            }
            tcg_gen_shri_tl(reg, cpu_cc_src, 4); /* CC_O -> CC_S */
            tcg_gen_xor_tl(reg, reg, cpu_cc_src);
            cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
                               .mask = CC_S };
            break;
        default:
        case JCC_LE:
            gen_compute_eflags(s);
            if (TCGV_EQUAL(reg, cpu_cc_src)) {
                reg = cpu_tmp0;
            }
            tcg_gen_shri_tl(reg, cpu_cc_src, 4); /* CC_O -> CC_S */
            tcg_gen_xor_tl(reg, reg, cpu_cc_src);
            cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
                               .mask = CC_S | CC_Z };
            break;
        }
        break;
    }

    if (inv) {
        cc.cond = tcg_invert_cond(cc.cond);
    }
    return cc;
}

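/* Store the condition selected by jump opcode B into REG as 0 or 1, taking
   the single-bit-mask fast path (shift + and) when possible.  */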
static void gen_setcc1(DisasContext *s, int b, TCGv reg)
{
    CCPrepare cc = gen_prepare_cc(s, b, reg);

    if (cc.no_setcond) {
        if (cc.cond == TCG_COND_EQ) {
            tcg_gen_xori_tl(reg, cc.reg, 1);
        } else {
            tcg_gen_mov_tl(reg, cc.reg);
        }
        return;
    }

    if (cc.cond == TCG_COND_NE && !cc.use_reg2 && cc.imm == 0 &&
        cc.mask != 0 && (cc.mask & (cc.mask - 1)) == 0) {
        tcg_gen_shri_tl(reg, cc.reg, ctztl(cc.mask));
        tcg_gen_andi_tl(reg, reg, 1);
        return;
    }
    if (cc.mask != -1) {
        tcg_gen_andi_tl(reg, cc.reg, cc.mask);
        cc.reg = reg;
    }
    if (cc.use_reg2) {
        tcg_gen_setcond_tl(cc.cond, reg, cc.reg, cc.reg2);
    } else {
        tcg_gen_setcondi_tl(cc.cond, reg, cc.reg, cc.imm);
    }
}

static inline void gen_compute_eflags_c(DisasContext *s, TCGv reg)
{
    gen_setcc1(s, JCC_B << 1, reg);
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1_noeob(DisasContext *s, int b, int l1)
{
    CCPrepare cc = gen_prepare_cc(s, b, cpu_T[0]);

    if (cc.mask != -1) {
        tcg_gen_andi_tl(cpu_T[0], cc.reg, cc.mask);
        cc.reg = cpu_T[0];
    }
    if (cc.use_reg2) {
        tcg_gen_brcond_tl(cc.cond, cc.reg, cc.reg2, l1);
    } else {
        tcg_gen_brcondi_tl(cc.cond, cc.reg, cc.imm, l1);
    }
}

/* Generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used.
   A translation block must end soon.  */
static inline void gen_jcc1(DisasContext *s, int b, int l1)
{
    CCPrepare cc = gen_prepare_cc(s, b, cpu_T[0]);

    gen_update_cc_op(s);
    if (cc.mask != -1) {
        tcg_gen_andi_tl(cpu_T[0], cc.reg, cc.mask);
        cc.reg = cpu_T[0];
    }
    set_cc_op(s, CC_OP_DYNAMIC);
    if (cc.use_reg2) {
        tcg_gen_brcond_tl(cc.cond, cc.reg, cc.reg2, l1);
    } else {
        tcg_gen_brcondi_tl(cc.cond, cc.reg, cc.imm, l1);
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op(s, OP_CMPL, ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_string_movl_A0_ESI(s);
    gen_op(s, OP_CMPL, ot, OR_TMP0);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_ins(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

static inline void gen_outs(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_update_cc_op(s);                                                      \
    gen_jcc1(s, (JCC_Z << 1) | (nz ^ 1), l2);                                 \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

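/* Map the 3-bit x87 arithmetic group field (FADD, FMUL, FCOM, FCOMP, FSUB,
   FSUBR, FDIV, FDIVR) onto the corresponding ST0/FT0 helper; op values 2
   and 3 both use the compare helper.  */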
static void gen_helper_fp_arith_ST0_FT0(int op)
{
    switch (op) {
    case 0:
        gen_helper_fadd_ST0_FT0(cpu_env);
        break;
    case 1:
        gen_helper_fmul_ST0_FT0(cpu_env);
        break;
    case 2:
        gen_helper_fcom_ST0_FT0(cpu_env);
        break;
    case 3:
        gen_helper_fcom_ST0_FT0(cpu_env);
        break;
    case 4:
        gen_helper_fsub_ST0_FT0(cpu_env);
        break;
    case 5:
        gen_helper_fsubr_ST0_FT0(cpu_env);
        break;
    case 6:
        gen_helper_fdiv_ST0_FT0(cpu_env);
        break;
    case 7:
        gen_helper_fdivr_ST0_FT0(cpu_env);
        break;
    }
}

/* NOTE the exception in "r" op ordering */
static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
{
    TCGv_i32 tmp = tcg_const_i32(opreg);
    switch (op) {
    case 0:
        gen_helper_fadd_STN_ST0(cpu_env, tmp);
        break;
    case 1:
        gen_helper_fmul_STN_ST0(cpu_env, tmp);
        break;
    case 4:
        gen_helper_fsubr_STN_ST0(cpu_env, tmp);
        break;
    case 5:
        gen_helper_fsub_STN_ST0(cpu_env, tmp);
        break;
    case 6:
        gen_helper_fdivr_STN_ST0(cpu_env, tmp);
        break;
    case 7:
        gen_helper_fdiv_STN_ST0(cpu_env, tmp);
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        gen_compute_eflags_c(s1, cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update3_cc(cpu_tmp4);
        set_cc_op(s1, CC_OP_ADCB + ot);
        break;
    case OP_SBBL:
        gen_compute_eflags_c(s1, cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update3_cc(cpu_tmp4);
        set_cc_op(s1, CC_OP_SBBB + ot);
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        set_cc_op(s1, CC_OP_ADDB + ot);
        break;
    case OP_SUBL:
        tcg_gen_mov_tl(cpu_cc_srcT, cpu_T[0]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        set_cc_op(s1, CC_OP_SUBB + ot);
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        set_cc_op(s1, CC_OP_LOGICB + ot);
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        set_cc_op(s1, CC_OP_LOGICB + ot);
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        set_cc_op(s1, CC_OP_LOGICB + ot);
        break;
    case OP_CMPL:
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_srcT, cpu_T[0]);
        tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
        set_cc_op(s1, CC_OP_SUBB + ot);
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(s1, cpu_cc_src);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        set_cc_op(s1, CC_OP_INCB + ot);
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        set_cc_op(s1, CC_OP_DECB + ot);
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

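/* Update the lazy flag state after a variable-count shift.  CC_DST, CC_SRC
   and CC_OP are only changed when COUNT is non-zero; this is done with
   movcond rather than branches, so CC_OP ends up dynamic.  */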
static void gen_shift_flags(DisasContext *s, int ot, TCGv result, TCGv shm1,
                            TCGv count, bool is_right)
{
    TCGv_i32 z32, s32, oldop;
    TCGv z_tl;

    /* Store the results into the CC variables.  If we know that the
       variable must be dead, store unconditionally.  Otherwise we'll
       need to not disrupt the current contents.  */
    z_tl = tcg_const_tl(0);
    if (cc_op_live[s->cc_op] & USES_CC_DST) {
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_cc_dst, count, z_tl,
                           result, cpu_cc_dst);
    } else {
        tcg_gen_mov_tl(cpu_cc_dst, result);
    }
    if (cc_op_live[s->cc_op] & USES_CC_SRC) {
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_cc_src, count, z_tl,
                           shm1, cpu_cc_src);
    } else {
        tcg_gen_mov_tl(cpu_cc_src, shm1);
    }
    tcg_temp_free(z_tl);

    /* Get the two potential CC_OP values into temporaries.  */
    tcg_gen_movi_i32(cpu_tmp2_i32, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
    if (s->cc_op == CC_OP_DYNAMIC) {
        oldop = cpu_cc_op;
    } else {
        tcg_gen_movi_i32(cpu_tmp3_i32, s->cc_op);
        oldop = cpu_tmp3_i32;
    }

    /* Conditionally store the CC_OP value.  */
    z32 = tcg_const_i32(0);
    s32 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(s32, count);
    tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, s32, z32, cpu_tmp2_i32, oldop);
    tcg_temp_free_i32(z32);
    tcg_temp_free_i32(s32);

    /* The CC_OP value is no longer predictable.  */
    set_cc_op(s, CC_OP_DYNAMIC);
}

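/* SHL/SHR/SAR of OP1 (register, or memory at A0 when OP1 == OR_TMP0) by a
   variable count in T1.  The value shifted by count-1 is kept in cpu_tmp0
   so gen_shift_flags() can recover the carry flag.  */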
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask = (ot == OT_QUAD ? 0x3f : 0x1f);

    /* load */
    if (op1 == OR_TMP0) {
        gen_op_ld_T0_A0(ot + s->mem_index);
    } else {
        gen_op_mov_TN_reg(ot, 0, op1);
    }

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
    tcg_gen_subi_tl(cpu_tmp0, cpu_T[1], 1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_T0_A0(ot + s->mem_index);
    } else {
        gen_op_mov_reg_T0(ot, op1);
    }

    gen_shift_flags(s, ot, cpu_T[0], cpu_tmp0, cpu_T[1], is_right);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask = (ot == OT_QUAD ? 0x3f : 0x1f);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        set_cc_op(s, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
    }
}

static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

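/* ROL/ROR by a variable count in T1.  Byte and word operands are
   replicated so a 32-bit rotate gives the correct result; C and O are then
   recovered from the rotated value, and CC_OP is chosen with movcond so a
   zero count leaves the flags unchanged.  */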
34d80a55 1715static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, int is_right)
b6abf97d 1716{
34d80a55
RH
1717 target_ulong mask = (ot == OT_QUAD ? 0x3f : 0x1f);
1718 TCGv_i32 t0, t1;
b6abf97d
FB
1719
1720 /* load */
1e4840bf 1721 if (op1 == OR_TMP0) {
34d80a55 1722 gen_op_ld_T0_A0(ot + s->mem_index);
1e4840bf 1723 } else {
34d80a55 1724 gen_op_mov_TN_reg(ot, 0, op1);
1e4840bf 1725 }
b6abf97d 1726
34d80a55 1727 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
b6abf97d 1728
34d80a55
RH
1729 switch (ot) {
1730 case OT_BYTE:
1731 /* Replicate the 8-bit input so that a 32-bit rotate works. */
1732 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
1733 tcg_gen_muli_tl(cpu_T[0], cpu_T[0], 0x01010101);
1734 goto do_long;
1735 case OT_WORD:
1736 /* Replicate the 16-bit input so that a 32-bit rotate works. */
1737 tcg_gen_deposit_tl(cpu_T[0], cpu_T[0], cpu_T[0], 16, 16);
1738 goto do_long;
1739 do_long:
1740#ifdef TARGET_X86_64
1741 case OT_LONG:
1742 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1743 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
1744 if (is_right) {
1745 tcg_gen_rotr_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
1746 } else {
1747 tcg_gen_rotl_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
1748 }
1749 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
1750 break;
1751#endif
1752 default:
1753 if (is_right) {
1754 tcg_gen_rotr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1755 } else {
1756 tcg_gen_rotl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1757 }
1758 break;
b6abf97d 1759 }
b6abf97d 1760
b6abf97d 1761 /* store */
1e4840bf 1762 if (op1 == OR_TMP0) {
34d80a55 1763 gen_op_st_T0_A0(ot + s->mem_index);
1e4840bf 1764 } else {
34d80a55 1765 gen_op_mov_reg_T0(ot, op1);
1e4840bf 1766 }
b6abf97d 1767
34d80a55
RH
1768 /* We'll need the flags computed into CC_SRC. */
1769 gen_compute_eflags(s);
b6abf97d 1770
34d80a55
RH
1771 /* The value that was "rotated out" is now present at the other end
1772 of the word. Compute C into CC_DST and O into CC_SRC2. Note that
1773 since we've computed the flags into CC_SRC, these variables are
1774 currently dead. */
b6abf97d 1775 if (is_right) {
34d80a55
RH
1776 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask - 1);
1777 tcg_gen_shri_tl(cpu_cc_dst, cpu_T[0], mask);
1778 } else {
1779 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask);
1780 tcg_gen_andi_tl(cpu_cc_dst, cpu_T[0], 1);
b6abf97d 1781 }
34d80a55
RH
1782 tcg_gen_andi_tl(cpu_cc_src2, cpu_cc_src2, 1);
1783 tcg_gen_xor_tl(cpu_cc_src2, cpu_cc_src2, cpu_cc_dst);
1784
1785 /* Now conditionally store the new CC_OP value. If the shift count
1786 is 0 we keep the CC_OP_EFLAGS setting so that only CC_SRC is live.
1787 Otherwise reuse CC_OP_ADCOX which have the C and O flags split out
1788 exactly as we computed above. */
1789 t0 = tcg_const_i32(0);
1790 t1 = tcg_temp_new_i32();
1791 tcg_gen_trunc_tl_i32(t1, cpu_T[1]);
1792 tcg_gen_movi_i32(cpu_tmp2_i32, CC_OP_ADCOX);
1793 tcg_gen_movi_i32(cpu_tmp3_i32, CC_OP_EFLAGS);
1794 tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, t1, t0,
1795 cpu_tmp2_i32, cpu_tmp3_i32);
1796 tcg_temp_free_i32(t0);
1797 tcg_temp_free_i32(t1);
1798
1799 /* The CC_OP value is no longer predictable. */
1800 set_cc_op(s, CC_OP_DYNAMIC);
b6abf97d
FB
1801}
1802
8cd6345d 1803static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
1804 int is_right)
1805{
34d80a55
RH
1806 int mask = (ot == OT_QUAD ? 0x3f : 0x1f);
1807 int shift;
8cd6345d 1808
1809 /* load */
1810 if (op1 == OR_TMP0) {
34d80a55 1811 gen_op_ld_T0_A0(ot + s->mem_index);
8cd6345d 1812 } else {
34d80a55 1813 gen_op_mov_TN_reg(ot, 0, op1);
8cd6345d 1814 }
1815
8cd6345d 1816 op2 &= mask;
8cd6345d 1817 if (op2 != 0) {
34d80a55
RH
1818 switch (ot) {
1819#ifdef TARGET_X86_64
1820 case OT_LONG:
1821 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1822 if (is_right) {
1823 tcg_gen_rotri_i32(cpu_tmp2_i32, cpu_tmp2_i32, op2);
1824 } else {
1825 tcg_gen_rotli_i32(cpu_tmp2_i32, cpu_tmp2_i32, op2);
1826 }
1827 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
1828 break;
1829#endif
1830 default:
1831 if (is_right) {
1832 tcg_gen_rotri_tl(cpu_T[0], cpu_T[0], op2);
1833 } else {
1834 tcg_gen_rotli_tl(cpu_T[0], cpu_T[0], op2);
1835 }
1836 break;
1837 case OT_BYTE:
1838 mask = 7;
1839 goto do_shifts;
1840 case OT_WORD:
1841 mask = 15;
1842 do_shifts:
1843 shift = op2 & mask;
1844 if (is_right) {
1845 shift = mask + 1 - shift;
1846 }
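            /* Synthesize the 8/16-bit rotate from two shifts: zero-extend
               the value, then OR (x << shift) with (x >> (width - shift)).
               A right rotate was converted to the equivalent left rotate
               just above. */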
1847 gen_extu(ot, cpu_T[0]);
1848 tcg_gen_shli_tl(cpu_tmp0, cpu_T[0], shift);
1849 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], mask + 1 - shift);
1850 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1851 break;
8cd6345d 1852 }
8cd6345d 1853 }
1854
1855 /* store */
1856 if (op1 == OR_TMP0) {
34d80a55 1857 gen_op_st_T0_A0(ot + s->mem_index);
8cd6345d 1858 } else {
34d80a55 1859 gen_op_mov_reg_T0(ot, op1);
8cd6345d 1860 }
1861
1862 if (op2 != 0) {
34d80a55 1863 /* Compute the flags into CC_SRC. */
d229edce 1864 gen_compute_eflags(s);
0ff6addd 1865
34d80a55
RH
1866 /* The value that was "rotated out" is now present at the other end
1867 of the word. Compute C into CC_DST and O into CC_SRC2. Note that
1868 since we've computed the flags into CC_SRC, these variables are
1869 currently dead. */
8cd6345d 1870 if (is_right) {
34d80a55
RH
1871 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask - 1);
1872 tcg_gen_shri_tl(cpu_cc_dst, cpu_T[0], mask);
1873 } else {
1874 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask);
1875 tcg_gen_andi_tl(cpu_cc_dst, cpu_T[0], 1);
8cd6345d 1876 }
34d80a55
RH
1877 tcg_gen_andi_tl(cpu_cc_src2, cpu_cc_src2, 1);
1878 tcg_gen_xor_tl(cpu_cc_src2, cpu_cc_src2, cpu_cc_dst);
1879 set_cc_op(s, CC_OP_ADCOX);
8cd6345d 1880 }
8cd6345d 1881}
1882
b6abf97d
FB
1883/* XXX: add faster immediate = 1 case */
1884static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1885 int is_right)
1886{
d229edce 1887 gen_compute_eflags(s);
c7b3c873 1888 assert(s->cc_op == CC_OP_EFLAGS);
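    /* RCL/RCR rotate through the carry flag, so the flags are materialized
       into CC_SRC first; the helpers below read and update CC_SRC directly
       while CC_OP stays CC_OP_EFLAGS. */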
b6abf97d
FB
1889
1890 /* load */
1891 if (op1 == OR_TMP0)
1892 gen_op_ld_T0_A0(ot + s->mem_index);
1893 else
1894 gen_op_mov_TN_reg(ot, 0, op1);
1895
a7812ae4
PB
1896 if (is_right) {
1897 switch (ot) {
93ab25d7 1898 case OT_BYTE:
7923057b
BS
1899 gen_helper_rcrb(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1900 break;
93ab25d7 1901 case OT_WORD:
7923057b
BS
1902 gen_helper_rcrw(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1903 break;
93ab25d7 1904 case OT_LONG:
7923057b
BS
1905 gen_helper_rcrl(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1906 break;
a7812ae4 1907#ifdef TARGET_X86_64
93ab25d7 1908 case OT_QUAD:
7923057b
BS
1909 gen_helper_rcrq(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1910 break;
a7812ae4
PB
1911#endif
1912 }
1913 } else {
1914 switch (ot) {
93ab25d7 1915 case OT_BYTE:
7923057b
BS
1916 gen_helper_rclb(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1917 break;
93ab25d7 1918 case OT_WORD:
7923057b
BS
1919 gen_helper_rclw(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1920 break;
93ab25d7 1921 case OT_LONG:
7923057b
BS
1922 gen_helper_rcll(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1923 break;
a7812ae4 1924#ifdef TARGET_X86_64
93ab25d7 1925 case OT_QUAD:
7923057b
BS
1926 gen_helper_rclq(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1927 break;
a7812ae4
PB
1928#endif
1929 }
1930 }
b6abf97d
FB
1931 /* store */
1932 if (op1 == OR_TMP0)
1933 gen_op_st_T0_A0(ot + s->mem_index);
1934 else
1935 gen_op_mov_reg_T0(ot, op1);
b6abf97d
FB
1936}
1937
1938/* XXX: add faster immediate case */
3b9d3cf1 1939static void gen_shiftd_rm_T1(DisasContext *s, int ot, int op1,
f437d0a3 1940 bool is_right, TCGv count_in)
b6abf97d 1941{
f437d0a3
RH
1942 target_ulong mask = (ot == OT_QUAD ? 63 : 31);
1943 TCGv count;
b6abf97d
FB
1944
1945 /* load */
1e4840bf 1946 if (op1 == OR_TMP0) {
f437d0a3 1947 gen_op_ld_T0_A0(ot + s->mem_index);
1e4840bf 1948 } else {
f437d0a3 1949 gen_op_mov_TN_reg(ot, 0, op1);
1e4840bf 1950 }
b6abf97d 1951
f437d0a3
RH
1952 count = tcg_temp_new();
1953 tcg_gen_andi_tl(count, count_in, mask);
1e4840bf 1954
f437d0a3
RH
1955 switch (ot) {
1956 case OT_WORD:
1957 /* Note: we implement the Intel behaviour for shift count > 16.
1958 This means "shrdw C, B, A" shifts A:B:A >> C. Build the B:A
1959 portion by constructing it as a 32-bit value. */
b6abf97d 1960 if (is_right) {
f437d0a3
RH
1961 tcg_gen_deposit_tl(cpu_tmp0, cpu_T[0], cpu_T[1], 16, 16);
1962 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1963 tcg_gen_mov_tl(cpu_T[0], cpu_tmp0);
b6abf97d 1964 } else {
f437d0a3 1965 tcg_gen_deposit_tl(cpu_T[1], cpu_T[0], cpu_T[1], 16, 16);
b6abf97d 1966 }
f437d0a3
RH
1967 /* FALLTHRU */
1968#ifdef TARGET_X86_64
1969 case OT_LONG:
1970 /* Concatenate the two 32-bit values and use a 64-bit shift. */
1971 tcg_gen_subi_tl(cpu_tmp0, count, 1);
b6abf97d 1972 if (is_right) {
f437d0a3
RH
1973 tcg_gen_concat_tl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1974 tcg_gen_shr_i64(cpu_tmp0, cpu_T[0], cpu_tmp0);
1975 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], count);
1976 } else {
1977 tcg_gen_concat_tl_i64(cpu_T[0], cpu_T[1], cpu_T[0]);
1978 tcg_gen_shl_i64(cpu_tmp0, cpu_T[0], cpu_tmp0);
1979 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], count);
1980 tcg_gen_shri_i64(cpu_tmp0, cpu_tmp0, 32);
1981 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], 32);
1982 }
1983 break;
1984#endif
1985 default:
1986 tcg_gen_subi_tl(cpu_tmp0, count, 1);
1987 if (is_right) {
1988 tcg_gen_shr_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
b6abf97d 1989
f437d0a3
RH
1990 tcg_gen_subfi_tl(cpu_tmp4, mask + 1, count);
1991 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], count);
1992 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp4);
b6abf97d 1993 } else {
f437d0a3
RH
1994 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1995 if (ot == OT_WORD) {
1996 /* Only needed if count > 16, for Intel behaviour. */
1997 tcg_gen_subfi_tl(cpu_tmp4, 33, count);
1998 tcg_gen_shr_tl(cpu_tmp4, cpu_T[1], cpu_tmp4);
1999 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, cpu_tmp4);
2000 }
2001
2002 tcg_gen_subfi_tl(cpu_tmp4, mask + 1, count);
2003 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], count);
2004 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp4);
b6abf97d 2005 }
f437d0a3
RH
2006 tcg_gen_movi_tl(cpu_tmp4, 0);
2007 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_T[1], count, cpu_tmp4,
2008 cpu_tmp4, cpu_T[1]);
2009 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2010 break;
b6abf97d 2011 }
b6abf97d 2012
b6abf97d 2013 /* store */
1e4840bf 2014 if (op1 == OR_TMP0) {
f437d0a3 2015 gen_op_st_T0_A0(ot + s->mem_index);
b6abf97d 2016 } else {
f437d0a3 2017 gen_op_mov_reg_T0(ot, op1);
b6abf97d 2018 }
1e4840bf 2019
f437d0a3
RH
2020 gen_shift_flags(s, ot, cpu_T[0], cpu_tmp0, count, is_right);
2021 tcg_temp_free(count);
b6abf97d
FB
2022}
2023
2024static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2025{
2026 if (s != OR_TMP1)
2027 gen_op_mov_TN_reg(ot, 1, s);
2028 switch(op) {
2029 case OP_ROL:
2030 gen_rot_rm_T1(s1, ot, d, 0);
2031 break;
2032 case OP_ROR:
2033 gen_rot_rm_T1(s1, ot, d, 1);
2034 break;
2035 case OP_SHL:
2036 case OP_SHL1:
2037 gen_shift_rm_T1(s1, ot, d, 0, 0);
2038 break;
2039 case OP_SHR:
2040 gen_shift_rm_T1(s1, ot, d, 1, 0);
2041 break;
2042 case OP_SAR:
2043 gen_shift_rm_T1(s1, ot, d, 1, 1);
2044 break;
2045 case OP_RCL:
2046 gen_rotc_rm_T1(s1, ot, d, 0);
2047 break;
2048 case OP_RCR:
2049 gen_rotc_rm_T1(s1, ot, d, 1);
2050 break;
2051 }
2c0262af
FB
2052}
2053
2054static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2055{
c1c37968 2056 switch(op) {
8cd6345d 2057 case OP_ROL:
2058 gen_rot_rm_im(s1, ot, d, c, 0);
2059 break;
2060 case OP_ROR:
2061 gen_rot_rm_im(s1, ot, d, c, 1);
2062 break;
c1c37968
FB
2063 case OP_SHL:
2064 case OP_SHL1:
2065 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2066 break;
2067 case OP_SHR:
2068 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2069 break;
2070 case OP_SAR:
2071 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2072 break;
2073 default:
2074 /* currently not optimized */
2075 gen_op_movl_T1_im(c);
2076 gen_shift(s1, op, ot, d, OR_TMP1);
2077 break;
2078 }
2c0262af
FB
2079}
2080
0af10c86
BS
2081static void gen_lea_modrm(CPUX86State *env, DisasContext *s, int modrm,
2082 int *reg_ptr, int *offset_ptr)
2c0262af 2083{
14ce26e7 2084 target_long disp;
2c0262af 2085 int havesib;
14ce26e7 2086 int base;
2c0262af
FB
2087 int index;
2088 int scale;
2089 int opreg;
2090 int mod, rm, code, override, must_add_seg;
2091
2092 override = s->override;
2093 must_add_seg = s->addseg;
2094 if (override >= 0)
2095 must_add_seg = 1;
2096 mod = (modrm >> 6) & 3;
2097 rm = modrm & 7;
2098
2099 if (s->aflag) {
2100
2101 havesib = 0;
2102 base = rm;
2103 index = 0;
2104 scale = 0;
3b46e624 2105
2c0262af
FB
2106 if (base == 4) {
2107 havesib = 1;
0af10c86 2108 code = cpu_ldub_code(env, s->pc++);
2c0262af 2109 scale = (code >> 6) & 3;
14ce26e7
FB
2110 index = ((code >> 3) & 7) | REX_X(s);
2111 base = (code & 7);
2c0262af 2112 }
14ce26e7 2113 base |= REX_B(s);
2c0262af
FB
2114
2115 switch (mod) {
2116 case 0:
14ce26e7 2117 if ((base & 7) == 5) {
2c0262af 2118 base = -1;
0af10c86 2119 disp = (int32_t)cpu_ldl_code(env, s->pc);
2c0262af 2120 s->pc += 4;
14ce26e7
FB
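                /* mod == 0 with base 5 means there is no base register; in
                   64-bit mode (and no SIB byte) the disp32 is RIP-relative,
                   i.e. relative to the end of the instruction, which
                   s->rip_offset accounts for. */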
2121 if (CODE64(s) && !havesib) {
2122 disp += s->pc + s->rip_offset;
2123 }
2c0262af
FB
2124 } else {
2125 disp = 0;
2126 }
2127 break;
2128 case 1:
0af10c86 2129 disp = (int8_t)cpu_ldub_code(env, s->pc++);
2c0262af
FB
2130 break;
2131 default:
2132 case 2:
0af10c86 2133 disp = (int32_t)cpu_ldl_code(env, s->pc);
2c0262af
FB
2134 s->pc += 4;
2135 break;
2136 }
3b46e624 2137
2c0262af
FB
2138 if (base >= 0) {
2139 /* for correct popl handling with esp */
2140 if (base == 4 && s->popl_esp_hack)
2141 disp += s->popl_esp_hack;
14ce26e7
FB
2142#ifdef TARGET_X86_64
2143 if (s->aflag == 2) {
57fec1fe 2144 gen_op_movq_A0_reg(base);
14ce26e7 2145 if (disp != 0) {
57fec1fe 2146 gen_op_addq_A0_im(disp);
14ce26e7 2147 }
5fafdf24 2148 } else
14ce26e7
FB
2149#endif
2150 {
57fec1fe 2151 gen_op_movl_A0_reg(base);
14ce26e7
FB
2152 if (disp != 0)
2153 gen_op_addl_A0_im(disp);
2154 }
2c0262af 2155 } else {
14ce26e7
FB
2156#ifdef TARGET_X86_64
2157 if (s->aflag == 2) {
57fec1fe 2158 gen_op_movq_A0_im(disp);
5fafdf24 2159 } else
14ce26e7
FB
2160#endif
2161 {
2162 gen_op_movl_A0_im(disp);
2163 }
2c0262af 2164 }
b16f827b
AJ
2165 /* index == 4 means no index */
2166 if (havesib && (index != 4)) {
14ce26e7
FB
2167#ifdef TARGET_X86_64
2168 if (s->aflag == 2) {
57fec1fe 2169 gen_op_addq_A0_reg_sN(scale, index);
5fafdf24 2170 } else
14ce26e7
FB
2171#endif
2172 {
57fec1fe 2173 gen_op_addl_A0_reg_sN(scale, index);
14ce26e7 2174 }
2c0262af
FB
2175 }
2176 if (must_add_seg) {
2177 if (override < 0) {
2178 if (base == R_EBP || base == R_ESP)
2179 override = R_SS;
2180 else
2181 override = R_DS;
2182 }
14ce26e7
FB
2183#ifdef TARGET_X86_64
2184 if (s->aflag == 2) {
57fec1fe 2185 gen_op_addq_A0_seg(override);
5fafdf24 2186 } else
14ce26e7
FB
2187#endif
2188 {
7162ab21 2189 gen_op_addl_A0_seg(s, override);
14ce26e7 2190 }
2c0262af
FB
2191 }
2192 } else {
2193 switch (mod) {
2194 case 0:
2195 if (rm == 6) {
0af10c86 2196 disp = cpu_lduw_code(env, s->pc);
2c0262af
FB
2197 s->pc += 2;
2198 gen_op_movl_A0_im(disp);
2199 rm = 0; /* avoid SS override */
2200 goto no_rm;
2201 } else {
2202 disp = 0;
2203 }
2204 break;
2205 case 1:
0af10c86 2206 disp = (int8_t)cpu_ldub_code(env, s->pc++);
2c0262af
FB
2207 break;
2208 default:
2209 case 2:
0af10c86 2210 disp = cpu_lduw_code(env, s->pc);
2c0262af
FB
2211 s->pc += 2;
2212 break;
2213 }
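        /* 16-bit addressing: rm selects BX+SI, BX+DI, BP+SI, BP+DI, SI,
           DI, BP (or a bare disp16 when mod == 0, handled above) or BX. */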
2214 switch(rm) {
2215 case 0:
57fec1fe
FB
2216 gen_op_movl_A0_reg(R_EBX);
2217 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af
FB
2218 break;
2219 case 1:
57fec1fe
FB
2220 gen_op_movl_A0_reg(R_EBX);
2221 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af
FB
2222 break;
2223 case 2:
57fec1fe
FB
2224 gen_op_movl_A0_reg(R_EBP);
2225 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af
FB
2226 break;
2227 case 3:
57fec1fe
FB
2228 gen_op_movl_A0_reg(R_EBP);
2229 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af
FB
2230 break;
2231 case 4:
57fec1fe 2232 gen_op_movl_A0_reg(R_ESI);
2c0262af
FB
2233 break;
2234 case 5:
57fec1fe 2235 gen_op_movl_A0_reg(R_EDI);
2c0262af
FB
2236 break;
2237 case 6:
57fec1fe 2238 gen_op_movl_A0_reg(R_EBP);
2c0262af
FB
2239 break;
2240 default:
2241 case 7:
57fec1fe 2242 gen_op_movl_A0_reg(R_EBX);
2c0262af
FB
2243 break;
2244 }
2245 if (disp != 0)
2246 gen_op_addl_A0_im(disp);
2247 gen_op_andl_A0_ffff();
2248 no_rm:
2249 if (must_add_seg) {
2250 if (override < 0) {
2251 if (rm == 2 || rm == 3 || rm == 6)
2252 override = R_SS;
2253 else
2254 override = R_DS;
2255 }
7162ab21 2256 gen_op_addl_A0_seg(s, override);
2c0262af
FB
2257 }
2258 }
2259
2260 opreg = OR_A0;
2261 disp = 0;
2262 *reg_ptr = opreg;
2263 *offset_ptr = disp;
2264}
2265
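/* Decode a modrm byte's addressing form only to skip over the SIB and
   displacement bytes, generating no code; used where the addressing bytes
   merely need to be consumed, e.g. for multi-byte NOP and hint encodings. */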
0af10c86 2266static void gen_nop_modrm(CPUX86State *env, DisasContext *s, int modrm)
e17a36ce
FB
2267{
2268 int mod, rm, base, code;
2269
2270 mod = (modrm >> 6) & 3;
2271 if (mod == 3)
2272 return;
2273 rm = modrm & 7;
2274
2275 if (s->aflag) {
2276
2277 base = rm;
3b46e624 2278
e17a36ce 2279 if (base == 4) {
0af10c86 2280 code = cpu_ldub_code(env, s->pc++);
e17a36ce
FB
2281 base = (code & 7);
2282 }
3b46e624 2283
e17a36ce
FB
2284 switch (mod) {
2285 case 0:
2286 if (base == 5) {
2287 s->pc += 4;
2288 }
2289 break;
2290 case 1:
2291 s->pc++;
2292 break;
2293 default:
2294 case 2:
2295 s->pc += 4;
2296 break;
2297 }
2298 } else {
2299 switch (mod) {
2300 case 0:
2301 if (rm == 6) {
2302 s->pc += 2;
2303 }
2304 break;
2305 case 1:
2306 s->pc++;
2307 break;
2308 default:
2309 case 2:
2310 s->pc += 2;
2311 break;
2312 }
2313 }
2314}
2315
664e0f19
FB
2316/* used for LEA and MOV AX, mem */
2317static void gen_add_A0_ds_seg(DisasContext *s)
2318{
2319 int override, must_add_seg;
2320 must_add_seg = s->addseg;
2321 override = R_DS;
2322 if (s->override >= 0) {
2323 override = s->override;
2324 must_add_seg = 1;
664e0f19
FB
2325 }
2326 if (must_add_seg) {
8f091a59
FB
2327#ifdef TARGET_X86_64
2328 if (CODE64(s)) {
57fec1fe 2329 gen_op_addq_A0_seg(override);
5fafdf24 2330 } else
8f091a59
FB
2331#endif
2332 {
7162ab21 2333 gen_op_addl_A0_seg(s, override);
8f091a59 2334 }
664e0f19
FB
2335 }
2336}
2337
222a3336 2338/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2c0262af 2339 OR_TMP0 */
0af10c86
BS
2340static void gen_ldst_modrm(CPUX86State *env, DisasContext *s, int modrm,
2341 int ot, int reg, int is_store)
2c0262af
FB
2342{
2343 int mod, rm, opreg, disp;
2344
2345 mod = (modrm >> 6) & 3;
14ce26e7 2346 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
2347 if (mod == 3) {
2348 if (is_store) {
2349 if (reg != OR_TMP0)
57fec1fe
FB
2350 gen_op_mov_TN_reg(ot, 0, reg);
2351 gen_op_mov_reg_T0(ot, rm);
2c0262af 2352 } else {
57fec1fe 2353 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af 2354 if (reg != OR_TMP0)
57fec1fe 2355 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
2356 }
2357 } else {
0af10c86 2358 gen_lea_modrm(env, s, modrm, &opreg, &disp);
2c0262af
FB
2359 if (is_store) {
2360 if (reg != OR_TMP0)
57fec1fe
FB
2361 gen_op_mov_TN_reg(ot, 0, reg);
2362 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 2363 } else {
57fec1fe 2364 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 2365 if (reg != OR_TMP0)
57fec1fe 2366 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
2367 }
2368 }
2369}
2370
0af10c86 2371static inline uint32_t insn_get(CPUX86State *env, DisasContext *s, int ot)
2c0262af
FB
2372{
2373 uint32_t ret;
2374
2375 switch(ot) {
2376 case OT_BYTE:
0af10c86 2377 ret = cpu_ldub_code(env, s->pc);
2c0262af
FB
2378 s->pc++;
2379 break;
2380 case OT_WORD:
0af10c86 2381 ret = cpu_lduw_code(env, s->pc);
2c0262af
FB
2382 s->pc += 2;
2383 break;
2384 default:
2385 case OT_LONG:
0af10c86 2386 ret = cpu_ldl_code(env, s->pc);
2c0262af
FB
2387 s->pc += 4;
2388 break;
2389 }
2390 return ret;
2391}
2392
14ce26e7
FB
2393static inline int insn_const_size(unsigned int ot)
2394{
2395 if (ot <= OT_LONG)
2396 return 1 << ot;
2397 else
2398 return 4;
2399}
2400
6e256c93
FB
2401static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2402{
2403 TranslationBlock *tb;
2404 target_ulong pc;
2405
2406 pc = s->cs_base + eip;
2407 tb = s->tb;
2408 /* NOTE: we handle the case where the TB spans two pages here */
2409 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2410 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2411 /* jump to same page: we can use a direct jump */
57fec1fe 2412 tcg_gen_goto_tb(tb_num);
6e256c93 2413 gen_jmp_im(eip);
4b4a72e5 2414 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
6e256c93
FB
2415 } else {
2416 /* jump to another page: currently not optimized */
2417 gen_jmp_im(eip);
2418 gen_eob(s);
2419 }
2420}
2421
5fafdf24 2422static inline void gen_jcc(DisasContext *s, int b,
14ce26e7 2423 target_ulong val, target_ulong next_eip)
2c0262af 2424{
b27fc131 2425 int l1, l2;
3b46e624 2426
2c0262af 2427 if (s->jmp_opt) {
14ce26e7 2428 l1 = gen_new_label();
b27fc131 2429 gen_jcc1(s, b, l1);
dc259201 2430
6e256c93 2431 gen_goto_tb(s, 0, next_eip);
14ce26e7
FB
2432
2433 gen_set_label(l1);
6e256c93 2434 gen_goto_tb(s, 1, val);
5779406a 2435 s->is_jmp = DISAS_TB_JUMP;
2c0262af 2436 } else {
14ce26e7
FB
2437 l1 = gen_new_label();
2438 l2 = gen_new_label();
b27fc131 2439 gen_jcc1(s, b, l1);
8e1c85e3 2440
14ce26e7 2441 gen_jmp_im(next_eip);
8e1c85e3
FB
2442 tcg_gen_br(l2);
2443
14ce26e7
FB
2444 gen_set_label(l1);
2445 gen_jmp_im(val);
2446 gen_set_label(l2);
2c0262af
FB
2447 gen_eob(s);
2448 }
2449}
2450
f32d3781
PB
2451static void gen_cmovcc1(CPUX86State *env, DisasContext *s, int ot, int b,
2452 int modrm, int reg)
2453{
57eb0cc8 2454 CCPrepare cc;
f32d3781 2455
57eb0cc8 2456 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
f32d3781 2457
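    /* CMOVcc: evaluate the condition without forcing a full EFLAGS
       computation, then use a movcond to pick between the loaded operand
       (cpu_T[0]) and the current destination register value. */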
57eb0cc8
RH
2458 cc = gen_prepare_cc(s, b, cpu_T[1]);
2459 if (cc.mask != -1) {
2460 TCGv t0 = tcg_temp_new();
2461 tcg_gen_andi_tl(t0, cc.reg, cc.mask);
2462 cc.reg = t0;
2463 }
2464 if (!cc.use_reg2) {
2465 cc.reg2 = tcg_const_tl(cc.imm);
f32d3781
PB
2466 }
2467
57eb0cc8
RH
2468 tcg_gen_movcond_tl(cc.cond, cpu_T[0], cc.reg, cc.reg2,
2469 cpu_T[0], cpu_regs[reg]);
2470 gen_op_mov_reg_T0(ot, reg);
2471
2472 if (cc.mask != -1) {
2473 tcg_temp_free(cc.reg);
2474 }
2475 if (!cc.use_reg2) {
2476 tcg_temp_free(cc.reg2);
2477 }
f32d3781
PB
2478}
2479
3bd7da9e
FB
2480static inline void gen_op_movl_T0_seg(int seg_reg)
2481{
2482 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2483 offsetof(CPUX86State,segs[seg_reg].selector));
2484}
2485
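/* Real-mode/VM86 segment load: store the 16-bit selector and set the
   segment base to selector << 4, with no descriptor fetch or checks. */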
2486static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2487{
2488 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2489 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2490 offsetof(CPUX86State,segs[seg_reg].selector));
2491 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2492 tcg_gen_st_tl(cpu_T[0], cpu_env,
2493 offsetof(CPUX86State,segs[seg_reg].base));
2494}
2495
2c0262af
FB
2496/* move T0 to seg_reg and compute if the CPU state may change. Never
2497 call this function with seg_reg == R_CS */
14ce26e7 2498static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2c0262af 2499{
3415a4dd
FB
2500 if (s->pe && !s->vm86) {
2501 /* XXX: optimize by finding processor state dynamically */
773cdfcc 2502 gen_update_cc_op(s);
14ce26e7 2503 gen_jmp_im(cur_eip);
b6abf97d 2504 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 2505 gen_helper_load_seg(cpu_env, tcg_const_i32(seg_reg), cpu_tmp2_i32);
dc196a57
FB
2506 /* abort translation because the addseg value may change or
2507 because ss32 may change. For R_SS, translation must always
2508 stop because special handling must be done to disable hardware
2509 interrupts for the next instruction */
2510 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
5779406a 2511 s->is_jmp = DISAS_TB_JUMP;
3415a4dd 2512 } else {
3bd7da9e 2513 gen_op_movl_seg_T0_vm(seg_reg);
dc196a57 2514 if (seg_reg == R_SS)
5779406a 2515 s->is_jmp = DISAS_TB_JUMP;
3415a4dd 2516 }
2c0262af
FB
2517}
2518
0573fbfc
TS
2519static inline int svm_is_rep(int prefixes)
2520{
2521 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2522}
2523
872929aa 2524static inline void
0573fbfc 2525gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
b8b6a50b 2526 uint32_t type, uint64_t param)
0573fbfc 2527{
872929aa
FB
2528 /* no SVM activated; fast case */
2529 if (likely(!(s->flags & HF_SVMI_MASK)))
2530 return;
773cdfcc 2531 gen_update_cc_op(s);
872929aa 2532 gen_jmp_im(pc_start - s->cs_base);
052e80d5 2533 gen_helper_svm_check_intercept_param(cpu_env, tcg_const_i32(type),
a7812ae4 2534 tcg_const_i64(param));
0573fbfc
TS
2535}
2536
872929aa 2537static inline void
0573fbfc
TS
2538gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2539{
872929aa 2540 gen_svm_check_intercept_param(s, pc_start, type, 0);
0573fbfc
TS
2541}
2542
4f31916f
FB
2543static inline void gen_stack_update(DisasContext *s, int addend)
2544{
14ce26e7
FB
2545#ifdef TARGET_X86_64
2546 if (CODE64(s)) {
6e0d8677 2547 gen_op_add_reg_im(2, R_ESP, addend);
14ce26e7
FB
2548 } else
2549#endif
4f31916f 2550 if (s->ss32) {
6e0d8677 2551 gen_op_add_reg_im(1, R_ESP, addend);
4f31916f 2552 } else {
6e0d8677 2553 gen_op_add_reg_im(0, R_ESP, addend);
4f31916f
FB
2554 }
2555}
2556
2c0262af
FB
2557/* generate a push. It depends on ss32, addseg and dflag */
2558static void gen_push_T0(DisasContext *s)
2559{
14ce26e7
FB
2560#ifdef TARGET_X86_64
2561 if (CODE64(s)) {
57fec1fe 2562 gen_op_movq_A0_reg(R_ESP);
8f091a59 2563 if (s->dflag) {
57fec1fe
FB
2564 gen_op_addq_A0_im(-8);
2565 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
8f091a59 2566 } else {
57fec1fe
FB
2567 gen_op_addq_A0_im(-2);
2568 gen_op_st_T0_A0(OT_WORD + s->mem_index);
8f091a59 2569 }
57fec1fe 2570 gen_op_mov_reg_A0(2, R_ESP);
5fafdf24 2571 } else
14ce26e7
FB
2572#endif
2573 {
57fec1fe 2574 gen_op_movl_A0_reg(R_ESP);
14ce26e7 2575 if (!s->dflag)
57fec1fe 2576 gen_op_addl_A0_im(-2);
14ce26e7 2577 else
57fec1fe 2578 gen_op_addl_A0_im(-4);
14ce26e7
FB
2579 if (s->ss32) {
2580 if (s->addseg) {
bbf662ee 2581 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
7162ab21 2582 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2583 }
2584 } else {
2585 gen_op_andl_A0_ffff();
bbf662ee 2586 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
7162ab21 2587 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2588 }
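        /* Store first and only then commit the new ESP, so a faulting push
           leaves ESP unchanged; cpu_T[1] holds the new stack offset without
           the SS base for the cases where A0 already includes it. */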
57fec1fe 2589 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
14ce26e7 2590 if (s->ss32 && !s->addseg)
57fec1fe 2591 gen_op_mov_reg_A0(1, R_ESP);
14ce26e7 2592 else
57fec1fe 2593 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2c0262af
FB
2594 }
2595}
2596
4f31916f
FB
2597/* generate a push. It depends on ss32, addseg and dflag */
2598/* slower version for T1, only used for call Ev */
2599static void gen_push_T1(DisasContext *s)
2c0262af 2600{
14ce26e7
FB
2601#ifdef TARGET_X86_64
2602 if (CODE64(s)) {
57fec1fe 2603 gen_op_movq_A0_reg(R_ESP);
8f091a59 2604 if (s->dflag) {
57fec1fe
FB
2605 gen_op_addq_A0_im(-8);
2606 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
8f091a59 2607 } else {
57fec1fe
FB
2608 gen_op_addq_A0_im(-2);
2609 gen_op_st_T1_A0(OT_WORD + s->mem_index);
8f091a59 2610 }
57fec1fe 2611 gen_op_mov_reg_A0(2, R_ESP);
5fafdf24 2612 } else
14ce26e7
FB
2613#endif
2614 {
57fec1fe 2615 gen_op_movl_A0_reg(R_ESP);
14ce26e7 2616 if (!s->dflag)
57fec1fe 2617 gen_op_addl_A0_im(-2);
14ce26e7 2618 else
57fec1fe 2619 gen_op_addl_A0_im(-4);
14ce26e7
FB
2620 if (s->ss32) {
2621 if (s->addseg) {
7162ab21 2622 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2623 }
2624 } else {
2625 gen_op_andl_A0_ffff();
7162ab21 2626 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2627 }
57fec1fe 2628 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
3b46e624 2629
14ce26e7 2630 if (s->ss32 && !s->addseg)
57fec1fe 2631 gen_op_mov_reg_A0(1, R_ESP);
14ce26e7
FB
2632 else
2633 gen_stack_update(s, (-2) << s->dflag);
2c0262af
FB
2634 }
2635}
2636
4f31916f
FB
2637/* two step pop is necessary for precise exceptions */
2638static void gen_pop_T0(DisasContext *s)
2c0262af 2639{
14ce26e7
FB
2640#ifdef TARGET_X86_64
2641 if (CODE64(s)) {
57fec1fe
FB
2642 gen_op_movq_A0_reg(R_ESP);
2643 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
5fafdf24 2644 } else
14ce26e7
FB
2645#endif
2646 {
57fec1fe 2647 gen_op_movl_A0_reg(R_ESP);
14ce26e7
FB
2648 if (s->ss32) {
2649 if (s->addseg)
7162ab21 2650 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2651 } else {
2652 gen_op_andl_A0_ffff();
7162ab21 2653 gen_op_addl_A0_seg(s, R_SS);
14ce26e7 2654 }
57fec1fe 2655 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2c0262af
FB
2656 }
2657}
2658
2659static void gen_pop_update(DisasContext *s)
2660{
14ce26e7 2661#ifdef TARGET_X86_64
8f091a59 2662 if (CODE64(s) && s->dflag) {
14ce26e7
FB
2663 gen_stack_update(s, 8);
2664 } else
2665#endif
2666 {
2667 gen_stack_update(s, 2 << s->dflag);
2668 }
2c0262af
FB
2669}
2670
2671static void gen_stack_A0(DisasContext *s)
2672{
57fec1fe 2673 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2674 if (!s->ss32)
2675 gen_op_andl_A0_ffff();
bbf662ee 2676 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2c0262af 2677 if (s->addseg)
7162ab21 2678 gen_op_addl_A0_seg(s, R_SS);
2c0262af
FB
2679}
2680
2681/* NOTE: wrap around in 16 bit not fully handled */
2682static void gen_pusha(DisasContext *s)
2683{
2684 int i;
57fec1fe 2685 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2686 gen_op_addl_A0_im(-16 << s->dflag);
2687 if (!s->ss32)
2688 gen_op_andl_A0_ffff();
bbf662ee 2689 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2c0262af 2690 if (s->addseg)
7162ab21 2691 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2692 for(i = 0;i < 8; i++) {
57fec1fe
FB
2693 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2694 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2c0262af
FB
2695 gen_op_addl_A0_im(2 << s->dflag);
2696 }
57fec1fe 2697 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af
FB
2698}
2699
2700/* NOTE: wrap around in 16 bit not fully handled */
2701static void gen_popa(DisasContext *s)
2702{
2703 int i;
57fec1fe 2704 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2705 if (!s->ss32)
2706 gen_op_andl_A0_ffff();
bbf662ee
FB
2707 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2708 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2c0262af 2709 if (s->addseg)
7162ab21 2710 gen_op_addl_A0_seg(s, R_SS);
2c0262af
FB
2711 for(i = 0;i < 8; i++) {
2712 /* ESP is not reloaded */
2713 if (i != 3) {
57fec1fe
FB
2714 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2715 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2c0262af
FB
2716 }
2717 gen_op_addl_A0_im(2 << s->dflag);
2718 }
57fec1fe 2719 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af
FB
2720}
2721
2c0262af
FB
2722static void gen_enter(DisasContext *s, int esp_addend, int level)
2723{
61a8c4ec 2724 int ot, opsize;
2c0262af 2725
2c0262af 2726 level &= 0x1f;
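    /* ENTER: push EBP, optionally copy 'level' outer frame pointers (done
       by a helper), point EBP at the saved EBP, then reserve esp_addend
       bytes of locals.  The nesting level is taken modulo 32, as on
       hardware. */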
8f091a59
FB
2727#ifdef TARGET_X86_64
2728 if (CODE64(s)) {
2729 ot = s->dflag ? OT_QUAD : OT_WORD;
2730 opsize = 1 << ot;
3b46e624 2731
57fec1fe 2732 gen_op_movl_A0_reg(R_ESP);
8f091a59 2733 gen_op_addq_A0_im(-opsize);
bbf662ee 2734 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
8f091a59
FB
2735
2736 /* push bp */
57fec1fe
FB
2737 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2738 gen_op_st_T0_A0(ot + s->mem_index);
8f091a59 2739 if (level) {
b5b38f61 2740 /* XXX: must save state */
2999a0b2 2741 gen_helper_enter64_level(cpu_env, tcg_const_i32(level),
a7812ae4
PB
2742 tcg_const_i32((ot == OT_QUAD)),
2743 cpu_T[1]);
8f091a59 2744 }
57fec1fe 2745 gen_op_mov_reg_T1(ot, R_EBP);
bbf662ee 2746 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
57fec1fe 2747 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
5fafdf24 2748 } else
8f091a59
FB
2749#endif
2750 {
2751 ot = s->dflag + OT_WORD;
2752 opsize = 2 << s->dflag;
3b46e624 2753
57fec1fe 2754 gen_op_movl_A0_reg(R_ESP);
8f091a59
FB
2755 gen_op_addl_A0_im(-opsize);
2756 if (!s->ss32)
2757 gen_op_andl_A0_ffff();
bbf662ee 2758 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
8f091a59 2759 if (s->addseg)
7162ab21 2760 gen_op_addl_A0_seg(s, R_SS);
8f091a59 2761 /* push bp */
57fec1fe
FB
2762 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2763 gen_op_st_T0_A0(ot + s->mem_index);
8f091a59 2764 if (level) {
b5b38f61 2765 /* XXX: must save state */
2999a0b2 2766 gen_helper_enter_level(cpu_env, tcg_const_i32(level),
a7812ae4
PB
2767 tcg_const_i32(s->dflag),
2768 cpu_T[1]);
8f091a59 2769 }
57fec1fe 2770 gen_op_mov_reg_T1(ot, R_EBP);
bbf662ee 2771 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
57fec1fe 2772 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af 2773 }
2c0262af
FB
2774}
2775
14ce26e7 2776static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2c0262af 2777{
773cdfcc 2778 gen_update_cc_op(s);
14ce26e7 2779 gen_jmp_im(cur_eip);
77b2bc2c 2780 gen_helper_raise_exception(cpu_env, tcg_const_i32(trapno));
5779406a 2781 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2782}
2783
2784/* an interrupt is different from an exception because of the
7f75ffd3 2785 privilege checks */
5fafdf24 2786static void gen_interrupt(DisasContext *s, int intno,
14ce26e7 2787 target_ulong cur_eip, target_ulong next_eip)
2c0262af 2788{
773cdfcc 2789 gen_update_cc_op(s);
14ce26e7 2790 gen_jmp_im(cur_eip);
77b2bc2c 2791 gen_helper_raise_interrupt(cpu_env, tcg_const_i32(intno),
a7812ae4 2792 tcg_const_i32(next_eip - cur_eip));
5779406a 2793 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2794}
2795
14ce26e7 2796static void gen_debug(DisasContext *s, target_ulong cur_eip)
2c0262af 2797{
773cdfcc 2798 gen_update_cc_op(s);
14ce26e7 2799 gen_jmp_im(cur_eip);
4a7443be 2800 gen_helper_debug(cpu_env);
5779406a 2801 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2802}
2803
2804/* generate a generic end of block. Trace exception is also generated
2805 if needed */
2806static void gen_eob(DisasContext *s)
2807{
773cdfcc 2808 gen_update_cc_op(s);
a2cc3b24 2809 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
f0967a1a 2810 gen_helper_reset_inhibit_irq(cpu_env);
a2cc3b24 2811 }
a2397807 2812 if (s->tb->flags & HF_RF_MASK) {
f0967a1a 2813 gen_helper_reset_rf(cpu_env);
a2397807 2814 }
34865134 2815 if (s->singlestep_enabled) {
4a7443be 2816 gen_helper_debug(cpu_env);
34865134 2817 } else if (s->tf) {
4a7443be 2818 gen_helper_single_step(cpu_env);
2c0262af 2819 } else {
57fec1fe 2820 tcg_gen_exit_tb(0);
2c0262af 2821 }
5779406a 2822 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2823}
2824
2825/* generate a jump to eip. No segment change must happen beforehand, as a
2826 direct call to the next block may occur */
14ce26e7 2827static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2c0262af 2828{
a3251186
RH
2829 gen_update_cc_op(s);
2830 set_cc_op(s, CC_OP_DYNAMIC);
2c0262af 2831 if (s->jmp_opt) {
6e256c93 2832 gen_goto_tb(s, tb_num, eip);
5779406a 2833 s->is_jmp = DISAS_TB_JUMP;
2c0262af 2834 } else {
14ce26e7 2835 gen_jmp_im(eip);
2c0262af
FB
2836 gen_eob(s);
2837 }
2838}
2839
14ce26e7
FB
2840static void gen_jmp(DisasContext *s, target_ulong eip)
2841{
2842 gen_jmp_tb(s, eip, 0);
2843}
2844
8686c490
FB
2845static inline void gen_ldq_env_A0(int idx, int offset)
2846{
2847 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2848 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2849 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
8686c490 2850}
664e0f19 2851
8686c490
FB
2852static inline void gen_stq_env_A0(int idx, int offset)
2853{
2854 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2855 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2856 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
8686c490 2857}
664e0f19 2858
8686c490
FB
2859static inline void gen_ldo_env_A0(int idx, int offset)
2860{
2861 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2862 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2863 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
8686c490 2864 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
b6abf97d
FB
2865 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2866 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
8686c490 2867}
14ce26e7 2868
8686c490
FB
2869static inline void gen_sto_env_A0(int idx, int offset)
2870{
2871 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2872 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2873 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
8686c490 2874 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
b6abf97d
FB
2875 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2876 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
8686c490 2877}
14ce26e7 2878
5af45186
FB
2879static inline void gen_op_movo(int d_offset, int s_offset)
2880{
b6abf97d
FB
2881 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2882 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2883 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2884 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
5af45186
FB
2885}
2886
2887static inline void gen_op_movq(int d_offset, int s_offset)
2888{
b6abf97d
FB
2889 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2890 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
5af45186
FB
2891}
2892
2893static inline void gen_op_movl(int d_offset, int s_offset)
2894{
b6abf97d
FB
2895 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2896 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
5af45186
FB
2897}
2898
2899static inline void gen_op_movq_env_0(int d_offset)
2900{
b6abf97d
FB
2901 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2902 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
5af45186 2903}
664e0f19 2904
d3eb5eae
BS
2905typedef void (*SSEFunc_i_ep)(TCGv_i32 val, TCGv_ptr env, TCGv_ptr reg);
2906typedef void (*SSEFunc_l_ep)(TCGv_i64 val, TCGv_ptr env, TCGv_ptr reg);
2907typedef void (*SSEFunc_0_epi)(TCGv_ptr env, TCGv_ptr reg, TCGv_i32 val);
2908typedef void (*SSEFunc_0_epl)(TCGv_ptr env, TCGv_ptr reg, TCGv_i64 val);
2909typedef void (*SSEFunc_0_epp)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b);
2910typedef void (*SSEFunc_0_eppi)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b,
2911 TCGv_i32 val);
c4baa050 2912typedef void (*SSEFunc_0_ppi)(TCGv_ptr reg_a, TCGv_ptr reg_b, TCGv_i32 val);
d3eb5eae
BS
2913typedef void (*SSEFunc_0_eppt)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b,
2914 TCGv val);
c4baa050 2915
5af45186
FB
2916#define SSE_SPECIAL ((void *)1)
2917#define SSE_DUMMY ((void *)2)
664e0f19 2918
a7812ae4
PB
2919#define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2920#define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2921 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
5af45186 2922
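/* sse_op_table1 is indexed by the second opcode byte and by the mandatory
   prefix: [0] none (MMX), [1] 0x66, [2] 0xF3, [3] 0xF2.  SSE_SPECIAL and
   SSE_DUMMY mark entries that are handled by explicit code in gen_sse(). */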
d3eb5eae 2923static const SSEFunc_0_epp sse_op_table1[256][4] = {
a35f3ec7
AJ
2924 /* 3DNow! extensions */
2925 [0x0e] = { SSE_DUMMY }, /* femms */
2926 [0x0f] = { SSE_DUMMY }, /* pf... */
664e0f19
FB
2927 /* pure SSE operations */
2928 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2929 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
465e9838 2930 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
664e0f19 2931 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
a7812ae4
PB
2932 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2933 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
664e0f19
FB
2934 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2935 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2936
2937 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2938 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2939 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
d9f4bb27 2940 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd, movntss, movntsd */
664e0f19
FB
2941 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2942 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
a7812ae4
PB
2943 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2944 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
664e0f19
FB
2945 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2946 [0x51] = SSE_FOP(sqrt),
a7812ae4
PB
2947 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2948 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2949 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2950 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2951 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2952 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
664e0f19
FB
2953 [0x58] = SSE_FOP(add),
2954 [0x59] = SSE_FOP(mul),
a7812ae4
PB
2955 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2956 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2957 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
664e0f19
FB
2958 [0x5c] = SSE_FOP(sub),
2959 [0x5d] = SSE_FOP(min),
2960 [0x5e] = SSE_FOP(div),
2961 [0x5f] = SSE_FOP(max),
2962
2963 [0xc2] = SSE_FOP(cmpeq),
d3eb5eae
BS
2964 [0xc6] = { (SSEFunc_0_epp)gen_helper_shufps,
2965 (SSEFunc_0_epp)gen_helper_shufpd }, /* XXX: casts */
664e0f19 2966
7073fbad
RH
2967 /* SSSE3, SSE4, MOVBE, CRC32, BMI1, BMI2, ADX. */
2968 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2969 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
4242b1bd 2970
664e0f19
FB
2971 /* MMX ops and their SSE extensions */
2972 [0x60] = MMX_OP2(punpcklbw),
2973 [0x61] = MMX_OP2(punpcklwd),
2974 [0x62] = MMX_OP2(punpckldq),
2975 [0x63] = MMX_OP2(packsswb),
2976 [0x64] = MMX_OP2(pcmpgtb),
2977 [0x65] = MMX_OP2(pcmpgtw),
2978 [0x66] = MMX_OP2(pcmpgtl),
2979 [0x67] = MMX_OP2(packuswb),
2980 [0x68] = MMX_OP2(punpckhbw),
2981 [0x69] = MMX_OP2(punpckhwd),
2982 [0x6a] = MMX_OP2(punpckhdq),
2983 [0x6b] = MMX_OP2(packssdw),
a7812ae4
PB
2984 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2985 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
664e0f19
FB
2986 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2987 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
d3eb5eae
BS
2988 [0x70] = { (SSEFunc_0_epp)gen_helper_pshufw_mmx,
2989 (SSEFunc_0_epp)gen_helper_pshufd_xmm,
2990 (SSEFunc_0_epp)gen_helper_pshufhw_xmm,
2991 (SSEFunc_0_epp)gen_helper_pshuflw_xmm }, /* XXX: casts */
664e0f19
FB
2992 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2993 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2994 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2995 [0x74] = MMX_OP2(pcmpeqb),
2996 [0x75] = MMX_OP2(pcmpeqw),
2997 [0x76] = MMX_OP2(pcmpeql),
a35f3ec7 2998 [0x77] = { SSE_DUMMY }, /* emms */
d9f4bb27
AP
2999 [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
3000 [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
a7812ae4
PB
3001 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
3002 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
664e0f19
FB
3003 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3004 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3005 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3006 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
a7812ae4 3007 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
664e0f19
FB
3008 [0xd1] = MMX_OP2(psrlw),
3009 [0xd2] = MMX_OP2(psrld),
3010 [0xd3] = MMX_OP2(psrlq),
3011 [0xd4] = MMX_OP2(paddq),
3012 [0xd5] = MMX_OP2(pmullw),
3013 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3014 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3015 [0xd8] = MMX_OP2(psubusb),
3016 [0xd9] = MMX_OP2(psubusw),
3017 [0xda] = MMX_OP2(pminub),
3018 [0xdb] = MMX_OP2(pand),
3019 [0xdc] = MMX_OP2(paddusb),
3020 [0xdd] = MMX_OP2(paddusw),
3021 [0xde] = MMX_OP2(pmaxub),
3022 [0xdf] = MMX_OP2(pandn),
3023 [0xe0] = MMX_OP2(pavgb),
3024 [0xe1] = MMX_OP2(psraw),
3025 [0xe2] = MMX_OP2(psrad),
3026 [0xe3] = MMX_OP2(pavgw),
3027 [0xe4] = MMX_OP2(pmulhuw),
3028 [0xe5] = MMX_OP2(pmulhw),
a7812ae4 3029 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
664e0f19
FB
3030 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3031 [0xe8] = MMX_OP2(psubsb),
3032 [0xe9] = MMX_OP2(psubsw),
3033 [0xea] = MMX_OP2(pminsw),
3034 [0xeb] = MMX_OP2(por),
3035 [0xec] = MMX_OP2(paddsb),
3036 [0xed] = MMX_OP2(paddsw),
3037 [0xee] = MMX_OP2(pmaxsw),
3038 [0xef] = MMX_OP2(pxor),
465e9838 3039 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
664e0f19
FB
3040 [0xf1] = MMX_OP2(psllw),
3041 [0xf2] = MMX_OP2(pslld),
3042 [0xf3] = MMX_OP2(psllq),
3043 [0xf4] = MMX_OP2(pmuludq),
3044 [0xf5] = MMX_OP2(pmaddwd),
3045 [0xf6] = MMX_OP2(psadbw),
d3eb5eae
BS
3046 [0xf7] = { (SSEFunc_0_epp)gen_helper_maskmov_mmx,
3047 (SSEFunc_0_epp)gen_helper_maskmov_xmm }, /* XXX: casts */
664e0f19
FB
3048 [0xf8] = MMX_OP2(psubb),
3049 [0xf9] = MMX_OP2(psubw),
3050 [0xfa] = MMX_OP2(psubl),
3051 [0xfb] = MMX_OP2(psubq),
3052 [0xfc] = MMX_OP2(paddb),
3053 [0xfd] = MMX_OP2(paddw),
3054 [0xfe] = MMX_OP2(paddl),
3055};
3056
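/* Shift-by-immediate groups 0x71/0x72/0x73: rows 0-7 are the word forms,
   8-15 the dword forms, 16-23 the qword forms, indexed within each group by
   the modrm reg field; column 0 is the MMX helper, column 1 the XMM one. */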
d3eb5eae 3057static const SSEFunc_0_epp sse_op_table2[3 * 8][2] = {
664e0f19
FB
3058 [0 + 2] = MMX_OP2(psrlw),
3059 [0 + 4] = MMX_OP2(psraw),
3060 [0 + 6] = MMX_OP2(psllw),
3061 [8 + 2] = MMX_OP2(psrld),
3062 [8 + 4] = MMX_OP2(psrad),
3063 [8 + 6] = MMX_OP2(pslld),
3064 [16 + 2] = MMX_OP2(psrlq),
a7812ae4 3065 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
664e0f19 3066 [16 + 6] = MMX_OP2(psllq),
a7812ae4 3067 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
664e0f19
FB
3068};
3069
d3eb5eae 3070static const SSEFunc_0_epi sse_op_table3ai[] = {
a7812ae4 3071 gen_helper_cvtsi2ss,
11f8cdbc 3072 gen_helper_cvtsi2sd
c4baa050 3073};
a7812ae4 3074
11f8cdbc 3075#ifdef TARGET_X86_64
d3eb5eae 3076static const SSEFunc_0_epl sse_op_table3aq[] = {
11f8cdbc
SW
3077 gen_helper_cvtsq2ss,
3078 gen_helper_cvtsq2sd
3079};
3080#endif
3081
d3eb5eae 3082static const SSEFunc_i_ep sse_op_table3bi[] = {
a7812ae4 3083 gen_helper_cvttss2si,
a7812ae4 3084 gen_helper_cvtss2si,
bedc2ac1 3085 gen_helper_cvttsd2si,
11f8cdbc 3086 gen_helper_cvtsd2si
664e0f19 3087};
3b46e624 3088
11f8cdbc 3089#ifdef TARGET_X86_64
d3eb5eae 3090static const SSEFunc_l_ep sse_op_table3bq[] = {
11f8cdbc 3091 gen_helper_cvttss2sq,
11f8cdbc 3092 gen_helper_cvtss2sq,
bedc2ac1 3093 gen_helper_cvttsd2sq,
11f8cdbc
SW
3094 gen_helper_cvtsd2sq
3095};
3096#endif
3097
d3eb5eae 3098static const SSEFunc_0_epp sse_op_table4[8][4] = {
664e0f19
FB
3099 SSE_FOP(cmpeq),
3100 SSE_FOP(cmplt),
3101 SSE_FOP(cmple),
3102 SSE_FOP(cmpunord),
3103 SSE_FOP(cmpneq),
3104 SSE_FOP(cmpnlt),
3105 SSE_FOP(cmpnle),
3106 SSE_FOP(cmpord),
3107};
3b46e624 3108
d3eb5eae 3109static const SSEFunc_0_epp sse_op_table5[256] = {
a7812ae4
PB
3110 [0x0c] = gen_helper_pi2fw,
3111 [0x0d] = gen_helper_pi2fd,
3112 [0x1c] = gen_helper_pf2iw,
3113 [0x1d] = gen_helper_pf2id,
3114 [0x8a] = gen_helper_pfnacc,
3115 [0x8e] = gen_helper_pfpnacc,
3116 [0x90] = gen_helper_pfcmpge,
3117 [0x94] = gen_helper_pfmin,
3118 [0x96] = gen_helper_pfrcp,
3119 [0x97] = gen_helper_pfrsqrt,
3120 [0x9a] = gen_helper_pfsub,
3121 [0x9e] = gen_helper_pfadd,
3122 [0xa0] = gen_helper_pfcmpgt,
3123 [0xa4] = gen_helper_pfmax,
3124 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
3125 [0xa7] = gen_helper_movq, /* pfrsqit1 */
3126 [0xaa] = gen_helper_pfsubr,
3127 [0xae] = gen_helper_pfacc,
3128 [0xb0] = gen_helper_pfcmpeq,
3129 [0xb4] = gen_helper_pfmul,
3130 [0xb6] = gen_helper_movq, /* pfrcpit2 */
3131 [0xb7] = gen_helper_pmulhrw_mmx,
3132 [0xbb] = gen_helper_pswapd,
3133 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
a35f3ec7
AJ
3134};
3135
d3eb5eae
BS
3136struct SSEOpHelper_epp {
3137 SSEFunc_0_epp op[2];
c4baa050
BS
3138 uint32_t ext_mask;
3139};
3140
d3eb5eae
BS
3141struct SSEOpHelper_eppi {
3142 SSEFunc_0_eppi op[2];
c4baa050 3143 uint32_t ext_mask;
222a3336 3144};
c4baa050 3145
222a3336 3146#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
a7812ae4
PB
3147#define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3148#define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
222a3336 3149#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
e71827bc
AJ
3150#define PCLMULQDQ_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, \
3151 CPUID_EXT_PCLMULQDQ }
d640045a 3152#define AESNI_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_AES }
c4baa050 3153
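/* sse_op_table6 covers the 0F 38 xx opcode space and sse_op_table7 the
   0F 3A xx space; each entry records the MMX/XMM helpers together with the
   CPUID feature bit that must be present for the instruction to exist. */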
d3eb5eae 3154static const struct SSEOpHelper_epp sse_op_table6[256] = {
222a3336
AZ
3155 [0x00] = SSSE3_OP(pshufb),
3156 [0x01] = SSSE3_OP(phaddw),
3157 [0x02] = SSSE3_OP(phaddd),
3158 [0x03] = SSSE3_OP(phaddsw),
3159 [0x04] = SSSE3_OP(pmaddubsw),
3160 [0x05] = SSSE3_OP(phsubw),
3161 [0x06] = SSSE3_OP(phsubd),
3162 [0x07] = SSSE3_OP(phsubsw),
3163 [0x08] = SSSE3_OP(psignb),
3164 [0x09] = SSSE3_OP(psignw),
3165 [0x0a] = SSSE3_OP(psignd),
3166 [0x0b] = SSSE3_OP(pmulhrsw),
3167 [0x10] = SSE41_OP(pblendvb),
3168 [0x14] = SSE41_OP(blendvps),
3169 [0x15] = SSE41_OP(blendvpd),
3170 [0x17] = SSE41_OP(ptest),
3171 [0x1c] = SSSE3_OP(pabsb),
3172 [0x1d] = SSSE3_OP(pabsw),
3173 [0x1e] = SSSE3_OP(pabsd),
3174 [0x20] = SSE41_OP(pmovsxbw),
3175 [0x21] = SSE41_OP(pmovsxbd),
3176 [0x22] = SSE41_OP(pmovsxbq),
3177 [0x23] = SSE41_OP(pmovsxwd),
3178 [0x24] = SSE41_OP(pmovsxwq),
3179 [0x25] = SSE41_OP(pmovsxdq),
3180 [0x28] = SSE41_OP(pmuldq),
3181 [0x29] = SSE41_OP(pcmpeqq),
3182 [0x2a] = SSE41_SPECIAL, /* movntqda */
3183 [0x2b] = SSE41_OP(packusdw),
3184 [0x30] = SSE41_OP(pmovzxbw),
3185 [0x31] = SSE41_OP(pmovzxbd),
3186 [0x32] = SSE41_OP(pmovzxbq),
3187 [0x33] = SSE41_OP(pmovzxwd),
3188 [0x34] = SSE41_OP(pmovzxwq),
3189 [0x35] = SSE41_OP(pmovzxdq),
3190 [0x37] = SSE42_OP(pcmpgtq),
3191 [0x38] = SSE41_OP(pminsb),
3192 [0x39] = SSE41_OP(pminsd),
3193 [0x3a] = SSE41_OP(pminuw),
3194 [0x3b] = SSE41_OP(pminud),
3195 [0x3c] = SSE41_OP(pmaxsb),
3196 [0x3d] = SSE41_OP(pmaxsd),
3197 [0x3e] = SSE41_OP(pmaxuw),
3198 [0x3f] = SSE41_OP(pmaxud),
3199 [0x40] = SSE41_OP(pmulld),
3200 [0x41] = SSE41_OP(phminposuw),
d640045a
AJ
3201 [0xdb] = AESNI_OP(aesimc),
3202 [0xdc] = AESNI_OP(aesenc),
3203 [0xdd] = AESNI_OP(aesenclast),
3204 [0xde] = AESNI_OP(aesdec),
3205 [0xdf] = AESNI_OP(aesdeclast),
4242b1bd
AZ
3206};
3207
d3eb5eae 3208static const struct SSEOpHelper_eppi sse_op_table7[256] = {
222a3336
AZ
3209 [0x08] = SSE41_OP(roundps),
3210 [0x09] = SSE41_OP(roundpd),
3211 [0x0a] = SSE41_OP(roundss),
3212 [0x0b] = SSE41_OP(roundsd),
3213 [0x0c] = SSE41_OP(blendps),
3214 [0x0d] = SSE41_OP(blendpd),
3215 [0x0e] = SSE41_OP(pblendw),
3216 [0x0f] = SSSE3_OP(palignr),
3217 [0x14] = SSE41_SPECIAL, /* pextrb */
3218 [0x15] = SSE41_SPECIAL, /* pextrw */
3219 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3220 [0x17] = SSE41_SPECIAL, /* extractps */
3221 [0x20] = SSE41_SPECIAL, /* pinsrb */
3222 [0x21] = SSE41_SPECIAL, /* insertps */
3223 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3224 [0x40] = SSE41_OP(dpps),
3225 [0x41] = SSE41_OP(dppd),
3226 [0x42] = SSE41_OP(mpsadbw),
e71827bc 3227 [0x44] = PCLMULQDQ_OP(pclmulqdq),
222a3336
AZ
3228 [0x60] = SSE42_OP(pcmpestrm),
3229 [0x61] = SSE42_OP(pcmpestri),
3230 [0x62] = SSE42_OP(pcmpistrm),
3231 [0x63] = SSE42_OP(pcmpistri),
d640045a 3232 [0xdf] = AESNI_OP(aeskeygenassist),
4242b1bd
AZ
3233};
3234
0af10c86
BS
3235static void gen_sse(CPUX86State *env, DisasContext *s, int b,
3236 target_ulong pc_start, int rex_r)
664e0f19
FB
3237{
3238 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3239 int modrm, mod, rm, reg, reg_addr, offset_addr;
d3eb5eae
BS
3240 SSEFunc_0_epp sse_fn_epp;
3241 SSEFunc_0_eppi sse_fn_eppi;
c4baa050 3242 SSEFunc_0_ppi sse_fn_ppi;
d3eb5eae 3243 SSEFunc_0_eppt sse_fn_eppt;
664e0f19
FB
3244
3245 b &= 0xff;
5fafdf24 3246 if (s->prefix & PREFIX_DATA)
664e0f19 3247 b1 = 1;
5fafdf24 3248 else if (s->prefix & PREFIX_REPZ)
664e0f19 3249 b1 = 2;
5fafdf24 3250 else if (s->prefix & PREFIX_REPNZ)
664e0f19
FB
3251 b1 = 3;
3252 else
3253 b1 = 0;
d3eb5eae
BS
3254 sse_fn_epp = sse_op_table1[b][b1];
3255 if (!sse_fn_epp) {
664e0f19 3256 goto illegal_op;
c4baa050 3257 }
a35f3ec7 3258 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
664e0f19
FB
3259 is_xmm = 1;
3260 } else {
3261 if (b1 == 0) {
3262 /* MMX case */
3263 is_xmm = 0;
3264 } else {
3265 is_xmm = 1;
3266 }
3267 }
3268 /* simple MMX/SSE operation */
3269 if (s->flags & HF_TS_MASK) {
3270 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3271 return;
3272 }
3273 if (s->flags & HF_EM_MASK) {
3274 illegal_op:
3275 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3276 return;
3277 }
3278 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
4242b1bd
AZ
3279 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3280 goto illegal_op;
e771edab
AJ
3281 if (b == 0x0e) {
3282 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3283 goto illegal_op;
3284 /* femms */
d3eb5eae 3285 gen_helper_emms(cpu_env);
e771edab
AJ
3286 return;
3287 }
3288 if (b == 0x77) {
3289 /* emms */
d3eb5eae 3290 gen_helper_emms(cpu_env);
664e0f19
FB
3291 return;
3292 }
3293 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3294 the static cpu state) */
3295 if (!is_xmm) {
d3eb5eae 3296 gen_helper_enter_mmx(cpu_env);
664e0f19
FB
3297 }
3298
0af10c86 3299 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3300 reg = ((modrm >> 3) & 7);
3301 if (is_xmm)
3302 reg |= rex_r;
3303 mod = (modrm >> 6) & 3;
d3eb5eae 3304 if (sse_fn_epp == SSE_SPECIAL) {
664e0f19
FB
3305 b |= (b1 << 8);
3306 switch(b) {
3307 case 0x0e7: /* movntq */
5fafdf24 3308 if (mod == 3)
664e0f19 3309 goto illegal_op;
0af10c86 3310 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3311 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3312 break;
3313 case 0x1e7: /* movntdq */
3314 case 0x02b: /* movntps */
3315 case 0x12b: /* movntpd */
2e21e749
T
3316 if (mod == 3)
3317 goto illegal_op;
0af10c86 3318 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2e21e749
T
3319 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3320 break;
465e9838
FB
3321 case 0x3f0: /* lddqu */
3322 if (mod == 3)
664e0f19 3323 goto illegal_op;
0af10c86 3324 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
c2254920 3325 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19 3326 break;
d9f4bb27
AP
3327 case 0x22b: /* movntss */
3328 case 0x32b: /* movntsd */
3329 if (mod == 3)
3330 goto illegal_op;
0af10c86 3331 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
d9f4bb27
AP
3332 if (b1 & 1) {
3333 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,
3334 xmm_regs[reg]));
3335 } else {
3336 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3337 xmm_regs[reg].XMM_L(0)));
3338 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3339 }
3340 break;
664e0f19 3341 case 0x6e: /* movd mm, ea */
dabd98dd
FB
3342#ifdef TARGET_X86_64
3343 if (s->dflag == 2) {
0af10c86 3344 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 0);
5af45186 3345 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
5fafdf24 3346 } else
dabd98dd
FB
3347#endif
3348 {
0af10c86 3349 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 0);
5af45186
FB
3350 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3351 offsetof(CPUX86State,fpregs[reg].mmx));
a7812ae4
PB
3352 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3353 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
dabd98dd 3354 }
664e0f19
FB
3355 break;
3356 case 0x16e: /* movd xmm, ea */
dabd98dd
FB
3357#ifdef TARGET_X86_64
3358 if (s->dflag == 2) {
0af10c86 3359 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 0);
5af45186
FB
3360 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3361 offsetof(CPUX86State,xmm_regs[reg]));
a7812ae4 3362 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
5fafdf24 3363 } else
dabd98dd
FB
3364#endif
3365 {
0af10c86 3366 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 0);
5af45186
FB
3367 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3368 offsetof(CPUX86State,xmm_regs[reg]));
b6abf97d 3369 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 3370 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
dabd98dd 3371 }
664e0f19
FB
3372 break;
3373 case 0x6f: /* movq mm, ea */
3374 if (mod != 3) {
0af10c86 3375 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3376 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3377 } else {
3378 rm = (modrm & 7);
b6abf97d 3379 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
5af45186 3380 offsetof(CPUX86State,fpregs[rm].mmx));
b6abf97d 3381 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
5af45186 3382 offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3383 }
3384 break;
3385 case 0x010: /* movups */
3386 case 0x110: /* movupd */
3387 case 0x028: /* movaps */
3388 case 0x128: /* movapd */
3389 case 0x16f: /* movdqa xmm, ea */
3390 case 0x26f: /* movdqu xmm, ea */
3391 if (mod != 3) {
0af10c86 3392 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3393 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3394 } else {
3395 rm = (modrm & 7) | REX_B(s);
3396 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3397 offsetof(CPUX86State,xmm_regs[rm]));
3398 }
3399 break;
3400 case 0x210: /* movss xmm, ea */
3401 if (mod != 3) {
0af10c86 3402 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 3403 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 3404 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
664e0f19 3405 gen_op_movl_T0_0();
651ba608
FB
3406 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3407 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3408 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
664e0f19
FB
3409 } else {
3410 rm = (modrm & 7) | REX_B(s);
3411 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3412 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3413 }
3414 break;
3415 case 0x310: /* movsd xmm, ea */
3416 if (mod != 3) {
0af10c86 3417 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3418 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19 3419 gen_op_movl_T0_0();
651ba608
FB
3420 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3421 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
664e0f19
FB
3422 } else {
3423 rm = (modrm & 7) | REX_B(s);
3424 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3425 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3426 }
3427 break;
3428 case 0x012: /* movlps */
3429 case 0x112: /* movlpd */
3430 if (mod != 3) {
0af10c86 3431 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3432 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3433 } else {
3434 /* movhlps */
3435 rm = (modrm & 7) | REX_B(s);
3436 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3437 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3438 }
3439 break;
465e9838
FB
3440 case 0x212: /* movsldup */
3441 if (mod != 3) {
0af10c86 3442 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3443 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
465e9838
FB
3444 } else {
3445 rm = (modrm & 7) | REX_B(s);
3446 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3447 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3448 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3449 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3450 }
3451 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3452 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3453 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3454 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3455 break;
3456 case 0x312: /* movddup */
3457 if (mod != 3) {
0af10c86 3458 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3459 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
465e9838
FB
3460 } else {
3461 rm = (modrm & 7) | REX_B(s);
3462 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3463 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3464 }
3465 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
ba6526df 3466 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
465e9838 3467 break;
664e0f19
FB
3468 case 0x016: /* movhps */
3469 case 0x116: /* movhpd */
3470 if (mod != 3) {
0af10c86 3471 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3472 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3473 } else {
3474 /* movlhps */
3475 rm = (modrm & 7) | REX_B(s);
3476 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3477 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3478 }
3479 break;
3480 case 0x216: /* movshdup */
3481 if (mod != 3) {
0af10c86 3482 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3483 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3484 } else {
3485 rm = (modrm & 7) | REX_B(s);
3486 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3487 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3488 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3489 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3490 }
3491 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3492 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3493 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3494 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3495 break;
d9f4bb27
AP
3496 case 0x178:
3497 case 0x378:
3498 {
3499 int bit_index, field_length;
3500
3501 if (b1 == 1 && reg != 0)
3502 goto illegal_op;
0af10c86
BS
3503 field_length = cpu_ldub_code(env, s->pc++) & 0x3F;
3504 bit_index = cpu_ldub_code(env, s->pc++) & 0x3F;
d9f4bb27
AP
3505 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3506 offsetof(CPUX86State,xmm_regs[reg]));
3507 if (b1 == 1)
d3eb5eae
BS
3508 gen_helper_extrq_i(cpu_env, cpu_ptr0,
3509 tcg_const_i32(bit_index),
3510 tcg_const_i32(field_length));
d9f4bb27 3511 else
d3eb5eae
BS
3512 gen_helper_insertq_i(cpu_env, cpu_ptr0,
3513 tcg_const_i32(bit_index),
3514 tcg_const_i32(field_length));
d9f4bb27
AP
3515 }
3516 break;
664e0f19 3517 case 0x7e: /* movd ea, mm */
dabd98dd
FB
3518#ifdef TARGET_X86_64
3519 if (s->dflag == 2) {
5af45186
FB
3520 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3521 offsetof(CPUX86State,fpregs[reg].mmx));
0af10c86 3522 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 1);
5fafdf24 3523 } else
dabd98dd
FB
3524#endif
3525 {
5af45186
FB
3526 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3527 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
0af10c86 3528 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 1);
dabd98dd 3529 }
664e0f19
FB
3530 break;
3531 case 0x17e: /* movd ea, xmm */
dabd98dd
FB
3532#ifdef TARGET_X86_64
3533 if (s->dflag == 2) {
5af45186
FB
3534 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3535 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
0af10c86 3536 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 1);
5fafdf24 3537 } else
dabd98dd
FB
3538#endif
3539 {
5af45186
FB
3540 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3541 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
0af10c86 3542 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 1);
dabd98dd 3543 }
664e0f19
FB
3544 break;
3545 case 0x27e: /* movq xmm, ea */
3546 if (mod != 3) {
0af10c86 3547 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3548 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3549 } else {
3550 rm = (modrm & 7) | REX_B(s);
3551 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3552 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3553 }
3554 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3555 break;
3556 case 0x7f: /* movq ea, mm */
3557 if (mod != 3) {
0af10c86 3558 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3559 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3560 } else {
3561 rm = (modrm & 7);
3562 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3563 offsetof(CPUX86State,fpregs[reg].mmx));
3564 }
3565 break;
3566 case 0x011: /* movups */
3567 case 0x111: /* movupd */
3568 case 0x029: /* movaps */
3569 case 0x129: /* movapd */
3570 case 0x17f: /* movdqa ea, xmm */
3571 case 0x27f: /* movdqu ea, xmm */
3572 if (mod != 3) {
0af10c86 3573 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3574 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3575 } else {
3576 rm = (modrm & 7) | REX_B(s);
3577 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3578 offsetof(CPUX86State,xmm_regs[reg]));
3579 }
3580 break;
3581 case 0x211: /* movss ea, xmm */
3582 if (mod != 3) {
0af10c86 3583 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 3584 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
57fec1fe 3585 gen_op_st_T0_A0(OT_LONG + s->mem_index);
664e0f19
FB
3586 } else {
3587 rm = (modrm & 7) | REX_B(s);
3588 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3589 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3590 }
3591 break;
3592 case 0x311: /* movsd ea, xmm */
3593 if (mod != 3) {
0af10c86 3594 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3595 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3596 } else {
3597 rm = (modrm & 7) | REX_B(s);
3598 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3599 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3600 }
3601 break;
3602 case 0x013: /* movlps */
3603 case 0x113: /* movlpd */
3604 if (mod != 3) {
0af10c86 3605 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3606 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3607 } else {
3608 goto illegal_op;
3609 }
3610 break;
3611 case 0x017: /* movhps */
3612 case 0x117: /* movhpd */
3613 if (mod != 3) {
0af10c86 3614 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3615 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3616 } else {
3617 goto illegal_op;
3618 }
3619 break;
3620 case 0x71: /* shift mm, im */
3621 case 0x72:
3622 case 0x73:
3623 case 0x171: /* shift xmm, im */
3624 case 0x172:
3625 case 0x173:
c045af25
AK
3626 if (b1 >= 2) {
3627 goto illegal_op;
3628 }
0af10c86 3629 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3630 if (is_xmm) {
3631 gen_op_movl_T0_im(val);
651ba608 3632 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
664e0f19 3633 gen_op_movl_T0_0();
651ba608 3634 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
664e0f19
FB
3635 op1_offset = offsetof(CPUX86State,xmm_t0);
3636 } else {
3637 gen_op_movl_T0_im(val);
651ba608 3638 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
664e0f19 3639 gen_op_movl_T0_0();
651ba608 3640 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
664e0f19
FB
3641 op1_offset = offsetof(CPUX86State,mmx_t0);
3642 }
d3eb5eae
BS
3643 sse_fn_epp = sse_op_table2[((b - 1) & 3) * 8 +
3644 (((modrm >> 3)) & 7)][b1];
3645 if (!sse_fn_epp) {
664e0f19 3646 goto illegal_op;
c4baa050 3647 }
664e0f19
FB
3648 if (is_xmm) {
3649 rm = (modrm & 7) | REX_B(s);
3650 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3651 } else {
3652 rm = (modrm & 7);
3653 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3654 }
5af45186
FB
3655 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3656 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
d3eb5eae 3657 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3658 break;
3659 case 0x050: /* movmskps */
664e0f19 3660 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3661 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3662 offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3663 gen_helper_movmskps(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3664 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
57fec1fe 3665 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19
FB
3666 break;
3667 case 0x150: /* movmskpd */
664e0f19 3668 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3669 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3670 offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3671 gen_helper_movmskpd(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3672 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
57fec1fe 3673 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19
FB
3674 break;
3675 case 0x02a: /* cvtpi2ps */
3676 case 0x12a: /* cvtpi2pd */
d3eb5eae 3677 gen_helper_enter_mmx(cpu_env);
664e0f19 3678 if (mod != 3) {
0af10c86 3679 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 3680 op2_offset = offsetof(CPUX86State,mmx_t0);
8686c490 3681 gen_ldq_env_A0(s->mem_index, op2_offset);
664e0f19
FB
3682 } else {
3683 rm = (modrm & 7);
3684 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3685 }
3686 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
5af45186
FB
3687 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3688 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
664e0f19
FB
3689 switch(b >> 8) {
3690 case 0x0:
d3eb5eae 3691 gen_helper_cvtpi2ps(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3692 break;
3693 default:
3694 case 0x1:
d3eb5eae 3695 gen_helper_cvtpi2pd(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3696 break;
3697 }
3698 break;
3699 case 0x22a: /* cvtsi2ss */
3700 case 0x32a: /* cvtsi2sd */
3701 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
0af10c86 3702 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
664e0f19 3703 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
5af45186 3704 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
28e10711 3705 if (ot == OT_LONG) {
d3eb5eae 3706 SSEFunc_0_epi sse_fn_epi = sse_op_table3ai[(b >> 8) & 1];
28e10711 3707 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 3708 sse_fn_epi(cpu_env, cpu_ptr0, cpu_tmp2_i32);
28e10711 3709 } else {
11f8cdbc 3710#ifdef TARGET_X86_64
d3eb5eae
BS
3711 SSEFunc_0_epl sse_fn_epl = sse_op_table3aq[(b >> 8) & 1];
3712 sse_fn_epl(cpu_env, cpu_ptr0, cpu_T[0]);
11f8cdbc
SW
3713#else
3714 goto illegal_op;
3715#endif
28e10711 3716 }
664e0f19
FB
3717 break;
3718 case 0x02c: /* cvttps2pi */
3719 case 0x12c: /* cvttpd2pi */
3720 case 0x02d: /* cvtps2pi */
3721 case 0x12d: /* cvtpd2pi */
d3eb5eae 3722 gen_helper_enter_mmx(cpu_env);
664e0f19 3723 if (mod != 3) {
0af10c86 3724 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 3725 op2_offset = offsetof(CPUX86State,xmm_t0);
8686c490 3726 gen_ldo_env_A0(s->mem_index, op2_offset);
664e0f19
FB
3727 } else {
3728 rm = (modrm & 7) | REX_B(s);
3729 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3730 }
3731 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
5af45186
FB
3732 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3733 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
664e0f19
FB
3734 switch(b) {
3735 case 0x02c:
d3eb5eae 3736 gen_helper_cvttps2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3737 break;
3738 case 0x12c:
d3eb5eae 3739 gen_helper_cvttpd2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3740 break;
3741 case 0x02d:
d3eb5eae 3742 gen_helper_cvtps2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3743 break;
3744 case 0x12d:
d3eb5eae 3745 gen_helper_cvtpd2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3746 break;
3747 }
3748 break;
3749 case 0x22c: /* cvttss2si */
3750 case 0x32c: /* cvttsd2si */
3751 case 0x22d: /* cvtss2si */
3752 case 0x32d: /* cvtsd2si */
3753 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
31313213 3754 if (mod != 3) {
0af10c86 3755 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
31313213 3756 if ((b >> 8) & 1) {
8686c490 3757 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
31313213 3758 } else {
57fec1fe 3759 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 3760 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
31313213
FB
3761 }
3762 op2_offset = offsetof(CPUX86State,xmm_t0);
3763 } else {
3764 rm = (modrm & 7) | REX_B(s);
3765 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3766 }
5af45186
FB
3767 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3768 if (ot == OT_LONG) {
d3eb5eae 3769 SSEFunc_i_ep sse_fn_i_ep =
bedc2ac1 3770 sse_op_table3bi[((b >> 7) & 2) | (b & 1)];
d3eb5eae 3771 sse_fn_i_ep(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3772 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5af45186 3773 } else {
11f8cdbc 3774#ifdef TARGET_X86_64
d3eb5eae 3775 SSEFunc_l_ep sse_fn_l_ep =
bedc2ac1 3776 sse_op_table3bq[((b >> 7) & 2) | (b & 1)];
d3eb5eae 3777 sse_fn_l_ep(cpu_T[0], cpu_env, cpu_ptr0);
11f8cdbc
SW
3778#else
3779 goto illegal_op;
3780#endif
5af45186 3781 }
57fec1fe 3782 gen_op_mov_reg_T0(ot, reg);
664e0f19
FB
3783 break;
3784 case 0xc4: /* pinsrw */
5fafdf24 3785 case 0x1c4:
d1e42c5c 3786 s->rip_offset = 1;
0af10c86
BS
3787 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
3788 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3789 if (b1) {
3790 val &= 7;
5af45186
FB
3791 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3792 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
664e0f19
FB
3793 } else {
3794 val &= 3;
5af45186
FB
3795 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3796 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
664e0f19
FB
3797 }
3798 break;
3799 case 0xc5: /* pextrw */
5fafdf24 3800 case 0x1c5:
664e0f19
FB
3801 if (mod != 3)
3802 goto illegal_op;
6dc2d0da 3803 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
0af10c86 3804 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3805 if (b1) {
3806 val &= 7;
3807 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3808 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3809 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
664e0f19
FB
3810 } else {
3811 val &= 3;
3812 rm = (modrm & 7);
5af45186
FB
3813 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3814 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
664e0f19
FB
3815 }
3816 reg = ((modrm >> 3) & 7) | rex_r;
6dc2d0da 3817 gen_op_mov_reg_T0(ot, reg);
664e0f19
FB
3818 break;
3819 case 0x1d6: /* movq ea, xmm */
3820 if (mod != 3) {
0af10c86 3821 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3822 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3823 } else {
3824 rm = (modrm & 7) | REX_B(s);
3825 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3826 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3827 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3828 }
3829 break;
3830 case 0x2d6: /* movq2dq */
d3eb5eae 3831 gen_helper_enter_mmx(cpu_env);
480c1cdb
FB
3832 rm = (modrm & 7);
3833 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3834 offsetof(CPUX86State,fpregs[rm].mmx));
3835 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3836 break;
3837 case 0x3d6: /* movdq2q */
d3eb5eae 3838 gen_helper_enter_mmx(cpu_env);
480c1cdb
FB
3839 rm = (modrm & 7) | REX_B(s);
3840 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3841 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
664e0f19
FB
3842 break;
3843 case 0xd7: /* pmovmskb */
3844 case 0x1d7:
3845 if (mod != 3)
3846 goto illegal_op;
3847 if (b1) {
3848 rm = (modrm & 7) | REX_B(s);
5af45186 3849 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3850 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_env, cpu_ptr0);
664e0f19
FB
3851 } else {
3852 rm = (modrm & 7);
5af45186 3853 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
d3eb5eae 3854 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_env, cpu_ptr0);
664e0f19 3855 }
b6abf97d 3856 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
664e0f19 3857 reg = ((modrm >> 3) & 7) | rex_r;
57fec1fe 3858 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19 3859 break;
111994ee 3860
4242b1bd 3861 case 0x138:
000cacf6 3862 case 0x038:
4242b1bd 3863 b = modrm;
111994ee
RH
3864 if ((b & 0xf0) == 0xf0) {
3865 goto do_0f_38_fx;
3866 }
0af10c86 3867 modrm = cpu_ldub_code(env, s->pc++);
4242b1bd
AZ
3868 rm = modrm & 7;
3869 reg = ((modrm >> 3) & 7) | rex_r;
3870 mod = (modrm >> 6) & 3;
c045af25
AK
3871 if (b1 >= 2) {
3872 goto illegal_op;
3873 }
4242b1bd 3874
d3eb5eae
BS
3875 sse_fn_epp = sse_op_table6[b].op[b1];
3876 if (!sse_fn_epp) {
4242b1bd 3877 goto illegal_op;
c4baa050 3878 }
222a3336
AZ
3879 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3880 goto illegal_op;
4242b1bd
AZ
3881
3882 if (b1) {
3883 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3884 if (mod == 3) {
3885 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3886 } else {
3887 op2_offset = offsetof(CPUX86State,xmm_t0);
0af10c86 3888 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
222a3336
AZ
3889 switch (b) {
3890 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3891 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3892 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3893 gen_ldq_env_A0(s->mem_index, op2_offset +
3894 offsetof(XMMReg, XMM_Q(0)));
3895 break;
3896 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3897 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
a7812ae4 3898 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 3899 (s->mem_index >> 2) - 1);
a7812ae4 3900 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
222a3336
AZ
3901 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3902 offsetof(XMMReg, XMM_L(0)));
3903 break;
3904 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3905 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3906 (s->mem_index >> 2) - 1);
3907 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3908 offsetof(XMMReg, XMM_W(0)));
3909 break;
3910 case 0x2a: /* movntdqa */
3911 gen_ldo_env_A0(s->mem_index, op1_offset);
3912 return;
3913 default:
3914 gen_ldo_env_A0(s->mem_index, op2_offset);
3915 }
4242b1bd
AZ
3916 }
3917 } else {
3918 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3919 if (mod == 3) {
3920 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3921 } else {
3922 op2_offset = offsetof(CPUX86State,mmx_t0);
0af10c86 3923 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
3924 gen_ldq_env_A0(s->mem_index, op2_offset);
3925 }
3926 }
d3eb5eae 3927 if (sse_fn_epp == SSE_SPECIAL) {
222a3336 3928 goto illegal_op;
c4baa050 3929 }
222a3336 3930
4242b1bd
AZ
3931 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3932 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 3933 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
222a3336 3934
3ca51d07
RH
3935 if (b == 0x17) {
3936 set_cc_op(s, CC_OP_EFLAGS);
3937 }
4242b1bd 3938 break;
111994ee
RH
3939
3940 case 0x238:
3941 case 0x338:
3942 do_0f_38_fx:
3943 /* Various integer extensions at 0f 38 f[0-f]. */
3944 b = modrm | (b1 << 8);
0af10c86 3945 modrm = cpu_ldub_code(env, s->pc++);
222a3336
AZ
3946 reg = ((modrm >> 3) & 7) | rex_r;
3947
111994ee
RH
3948 switch (b) {
3949 case 0x3f0: /* crc32 Gd,Eb */
3950 case 0x3f1: /* crc32 Gd,Ey */
3951 do_crc32:
3952 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42)) {
3953 goto illegal_op;
3954 }
3955 if ((b & 0xff) == 0xf0) {
3956 ot = OT_BYTE;
3957 } else if (s->dflag != 2) {
3958 ot = (s->prefix & PREFIX_DATA ? OT_WORD : OT_LONG);
3959 } else {
3960 ot = OT_QUAD;
3961 }
4242b1bd 3962
111994ee
RH
3963 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3964 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3965 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
3966 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3967 cpu_T[0], tcg_const_i32(8 << ot));
222a3336 3968
111994ee
RH
3969 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3970 gen_op_mov_reg_T0(ot, reg);
3971 break;
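/* The CRC32 instruction accumulates with the CRC-32C (Castagnoli)
   polynomial.  A bit-at-a-time reference for a single byte, given purely
   as an illustrative sketch (assumes <stdint.h>; the helper call above is
   the authoritative implementation):

   static uint32_t crc32c_byte(uint32_t crc, uint8_t data)
   {
       crc ^= data;
       for (int i = 0; i < 8; i++) {
           /* 0x82F63B78 is the bit-reflected CRC-32C polynomial */
           crc = (crc >> 1) ^ ((crc & 1) ? 0x82F63B78u : 0);
       }
       return crc;
   }
*/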
222a3336 3972
111994ee
RH
3973 case 0x1f0: /* crc32 or movbe */
3974 case 0x1f1:
3975 /* For these insns, the f3 prefix is supposed to take priority
3976 over the 66 prefix, but that is not reflected in the way b1
3977 was set above. */
3978 if (s->prefix & PREFIX_REPNZ) {
3979 goto do_crc32;
3980 }
3981 /* FALLTHRU */
3982 case 0x0f0: /* movbe Gy,My */
3983 case 0x0f1: /* movbe My,Gy */
3984 if (!(s->cpuid_ext_features & CPUID_EXT_MOVBE)) {
3985 goto illegal_op;
3986 }
3987 if (s->dflag != 2) {
3988 ot = (s->prefix & PREFIX_DATA ? OT_WORD : OT_LONG);
3989 } else {
3990 ot = OT_QUAD;
3991 }
3992
3993 /* Load the data incoming to the bswap. Note that the TCG
3994 implementation of bswap requires the input to be zero
3995 extended. For the loads, gen_op_ld_v (reached via
3996 gen_ldst_modrm) already guarantees that. */
3997 if ((b & 1) == 0) {
3998 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
3999 } else {
4000 switch (ot) {
4001 case OT_WORD:
4002 tcg_gen_ext16u_tl(cpu_T[0], cpu_regs[reg]);
4003 break;
4004 default:
4005 tcg_gen_ext32u_tl(cpu_T[0], cpu_regs[reg]);
4006 break;
4007 case OT_QUAD:
4008 tcg_gen_mov_tl(cpu_T[0], cpu_regs[reg]);
4009 break;
4010 }
4011 }
4012
4013 switch (ot) {
4014 case OT_WORD:
4015 tcg_gen_bswap16_tl(cpu_T[0], cpu_T[0]);
4016 break;
4017 default:
4018 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
4019 break;
4020#ifdef TARGET_X86_64
4021 case OT_QUAD:
4022 tcg_gen_bswap64_tl(cpu_T[0], cpu_T[0]);
4023 break;
4024#endif
4025 }
4026
4027 if ((b & 1) == 0) {
4028 gen_op_mov_reg_T0(ot, reg);
4029 } else {
4030 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
4031 }
4032 break;
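/* MOVBE is a load or store with the bytes reversed, which is why the
   translation above is just the normal load/store bracketed by a bswap of
   the right width.  As an illustrative sketch (not the translator's code
   path), the 32-bit load form is equivalent to (assumes <stdint.h>):

   static uint32_t movbe_load32(const uint8_t *p)
   {
       return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
              ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
   }
*/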
4033
7073fbad
RH
4034 case 0x0f2: /* andn Gy, By, Ey */
4035 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4036 || !(s->prefix & PREFIX_VEX)
4037 || s->vex_l != 0) {
4038 goto illegal_op;
4039 }
4040 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4041 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4042 tcg_gen_andc_tl(cpu_T[0], cpu_regs[s->vex_v], cpu_T[0]);
4043 gen_op_mov_reg_T0(ot, reg);
4044 gen_op_update1_cc();
4045 set_cc_op(s, CC_OP_LOGICB + ot);
4046 break;
4047
c7ab7565
RH
4048 case 0x0f7: /* bextr Gy, Ey, By */
4049 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4050 || !(s->prefix & PREFIX_VEX)
4051 || s->vex_l != 0) {
4052 goto illegal_op;
4053 }
4054 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4055 {
4056 TCGv bound, zero;
4057
4058 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4059 /* Extract START, and shift the operand.
4060 Shifts larger than operand size get zeros. */
4061 tcg_gen_ext8u_tl(cpu_A0, cpu_regs[s->vex_v]);
4062 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_A0);
4063
4064 bound = tcg_const_tl(ot == OT_QUAD ? 63 : 31);
4065 zero = tcg_const_tl(0);
4066 tcg_gen_movcond_tl(TCG_COND_LEU, cpu_T[0], cpu_A0, bound,
4067 cpu_T[0], zero);
4068 tcg_temp_free(zero);
4069
4070 /* Extract the LEN into a mask. Lengths larger than
4071 operand size get all ones. */
4072 tcg_gen_shri_tl(cpu_A0, cpu_regs[s->vex_v], 8);
4073 tcg_gen_ext8u_tl(cpu_A0, cpu_A0);
4074 tcg_gen_movcond_tl(TCG_COND_LEU, cpu_A0, cpu_A0, bound,
4075 cpu_A0, bound);
4076 tcg_temp_free(bound);
4077 tcg_gen_movi_tl(cpu_T[1], 1);
4078 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_A0);
4079 tcg_gen_subi_tl(cpu_T[1], cpu_T[1], 1);
4080 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4081
4082 gen_op_mov_reg_T0(ot, reg);
4083 gen_op_update1_cc();
4084 set_cc_op(s, CC_OP_LOGICB + ot);
4085 }
4086 break;
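/* For reference, BEXTR extracts a bit field whose start position is in
   bits [7:0] and whose length is in bits [15:8] of the vvvv register.
   An illustrative 64-bit sketch of the semantics the TCG sequence above
   mirrors (assumes <stdint.h>):

   static uint64_t bextr64(uint64_t src, uint64_t ctl)
   {
       unsigned start = ctl & 0xff;
       unsigned len = (ctl >> 8) & 0xff;
       uint64_t val = (start > 63) ? 0 : src >> start;
       uint64_t mask = (len > 63) ? ~(uint64_t)0
                                  : (((uint64_t)1 << len) - 1);
       return val & mask;
   }
*/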
4087
02ea1e6b
RH
4088 case 0x0f5: /* bzhi Gy, Ey, By */
4089 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4090 || !(s->prefix & PREFIX_VEX)
4091 || s->vex_l != 0) {
4092 goto illegal_op;
4093 }
4094 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4095 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4096 tcg_gen_ext8u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4097 {
4098 TCGv bound = tcg_const_tl(ot == OT_QUAD ? 63 : 31);
4099 /* Note that since we're using BMILG (in order to get O
4100 cleared) we need to store the inverse into C. */
4101 tcg_gen_setcond_tl(TCG_COND_LT, cpu_cc_src,
4102 cpu_T[1], bound);
4103 tcg_gen_movcond_tl(TCG_COND_GT, cpu_T[1], cpu_T[1],
4104 bound, bound, cpu_T[1]);
4105 tcg_temp_free(bound);
4106 }
4107 tcg_gen_movi_tl(cpu_A0, -1);
4108 tcg_gen_shl_tl(cpu_A0, cpu_A0, cpu_T[1]);
4109 tcg_gen_andc_tl(cpu_T[0], cpu_T[0], cpu_A0);
4110 gen_op_mov_reg_T0(ot, reg);
4111 gen_op_update1_cc();
4112 set_cc_op(s, CC_OP_BMILGB + ot);
4113 break;
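/* For reference, BZHI zeroes all bits of the source at positions >= n,
   where n is the low byte of the vvvv register.  A 64-bit sketch of the
   result computation, flags omitted (assumes <stdint.h>):

   static uint64_t bzhi64(uint64_t src, uint64_t n)
   {
       n &= 0xff;
       return (n > 63) ? src : (src & (((uint64_t)1 << n) - 1));
   }
*/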
4114
5f1f4b17
RH
4115 case 0x3f6: /* mulx By, Gy, rdx, Ey */
4116 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4117 || !(s->prefix & PREFIX_VEX)
4118 || s->vex_l != 0) {
4119 goto illegal_op;
4120 }
4121 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4122 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4123 switch (ot) {
5f1f4b17 4124 default:
a4bcea3d
RH
4125 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4126 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EDX]);
4127 tcg_gen_mulu2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
4128 cpu_tmp2_i32, cpu_tmp3_i32);
4129 tcg_gen_extu_i32_tl(cpu_regs[s->vex_v], cpu_tmp2_i32);
4130 tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp3_i32);
5f1f4b17
RH
4131 break;
4132#ifdef TARGET_X86_64
4133 case OT_QUAD:
a4bcea3d
RH
4134 tcg_gen_mulu2_i64(cpu_regs[s->vex_v], cpu_regs[reg],
4135 cpu_T[0], cpu_regs[R_EDX]);
5f1f4b17
RH
4136 break;
4137#endif
4138 }
4139 break;
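/* MULX is an unsigned widening multiply of (E/R)DX by the r/m operand:
   the low half goes to the vvvv register and the high half to the ModRM
   reg destination, with no flags touched.  Illustrative 32-bit sketch
   (assumes <stdint.h>):

   static void mulx32(uint32_t edx, uint32_t src,
                      uint32_t *lo, uint32_t *hi)
   {
       uint64_t prod = (uint64_t)edx * src;
       *lo = (uint32_t)prod;
       *hi = (uint32_t)(prod >> 32);
   }
*/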
4140
0592f74a
RH
4141 case 0x3f5: /* pdep Gy, By, Ey */
4142 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4143 || !(s->prefix & PREFIX_VEX)
4144 || s->vex_l != 0) {
4145 goto illegal_op;
4146 }
4147 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4148 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4149 /* Note that by zero-extending the mask operand, we
4150 automatically handle zero-extending the result. */
4151 if (s->dflag == 2) {
4152 tcg_gen_mov_tl(cpu_T[1], cpu_regs[s->vex_v]);
4153 } else {
4154 tcg_gen_ext32u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4155 }
4156 gen_helper_pdep(cpu_regs[reg], cpu_T[0], cpu_T[1]);
4157 break;
4158
4159 case 0x2f5: /* pext Gy, By, Ey */
4160 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4161 || !(s->prefix & PREFIX_VEX)
4162 || s->vex_l != 0) {
4163 goto illegal_op;
4164 }
4165 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4166 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4167 /* Note that by zero-extending the mask operand, we
4168 automatically handle zero-extending the result. */
4169 if (s->dflag == 2) {
4170 tcg_gen_mov_tl(cpu_T[1], cpu_regs[s->vex_v]);
4171 } else {
4172 tcg_gen_ext32u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4173 }
4174 gen_helper_pext(cpu_regs[reg], cpu_T[0], cpu_T[1]);
4175 break;
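/* PDEP scatters the low-order bits of a value into the bit positions
   selected by a mask; PEXT is the inverse gather.  Illustrative
   bit-by-bit sketches of the two operations (assumes <stdint.h>; the
   actual work is done by the helpers called above):

   static uint64_t pdep64(uint64_t val, uint64_t mask)
   {
       uint64_t res = 0, bit = 1;
       for (uint64_t m = mask; m != 0; m &= m - 1, bit <<= 1) {
           if (val & bit) {
               res |= m & -m;    -- lowest remaining mask bit --
           }
       }
       return res;
   }

   static uint64_t pext64(uint64_t val, uint64_t mask)
   {
       uint64_t res = 0, bit = 1;
       for (uint64_t m = mask; m != 0; m &= m - 1, bit <<= 1) {
           if (val & (m & -m)) {
               res |= bit;
           }
       }
       return res;
   }
*/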
4176
cd7f97ca
RH
4177 case 0x1f6: /* adcx Gy, Ey */
4178 case 0x2f6: /* adox Gy, Ey */
4179 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_ADX)) {
4180 goto illegal_op;
4181 } else {
76f13133 4182 TCGv carry_in, carry_out, zero;
cd7f97ca
RH
4183 int end_op;
4184
4185 ot = (s->dflag == 2 ? OT_QUAD : OT_LONG);
4186 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4187
4188 /* Re-use the carry-out from a previous round. */
4189 TCGV_UNUSED(carry_in);
4190 carry_out = (b == 0x1f6 ? cpu_cc_dst : cpu_cc_src2);
4191 switch (s->cc_op) {
4192 case CC_OP_ADCX:
4193 if (b == 0x1f6) {
4194 carry_in = cpu_cc_dst;
4195 end_op = CC_OP_ADCX;
4196 } else {
4197 end_op = CC_OP_ADCOX;
4198 }
4199 break;
4200 case CC_OP_ADOX:
4201 if (b == 0x1f6) {
4202 end_op = CC_OP_ADCOX;
4203 } else {
4204 carry_in = cpu_cc_src2;
4205 end_op = CC_OP_ADOX;
4206 }
4207 break;
4208 case CC_OP_ADCOX:
4209 end_op = CC_OP_ADCOX;
4210 carry_in = carry_out;
4211 break;
4212 default:
c53de1a2 4213 end_op = (b == 0x1f6 ? CC_OP_ADCX : CC_OP_ADOX);
cd7f97ca
RH
4214 break;
4215 }
4216 /* If we can't reuse carry-out, get it out of EFLAGS. */
4217 if (TCGV_IS_UNUSED(carry_in)) {
4218 if (s->cc_op != CC_OP_ADCX && s->cc_op != CC_OP_ADOX) {
4219 gen_compute_eflags(s);
4220 }
4221 carry_in = cpu_tmp0;
4222 tcg_gen_shri_tl(carry_in, cpu_cc_src,
4223 ctz32(b == 0x1f6 ? CC_C : CC_O));
4224 tcg_gen_andi_tl(carry_in, carry_in, 1);
4225 }
4226
4227 switch (ot) {
4228#ifdef TARGET_X86_64
4229 case OT_LONG:
4230 /* If we know TL is 64-bit, and we want a 32-bit
4231 result, just do everything in 64-bit arithmetic. */
4232 tcg_gen_ext32u_i64(cpu_regs[reg], cpu_regs[reg]);
4233 tcg_gen_ext32u_i64(cpu_T[0], cpu_T[0]);
4234 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_regs[reg]);
4235 tcg_gen_add_i64(cpu_T[0], cpu_T[0], carry_in);
4236 tcg_gen_ext32u_i64(cpu_regs[reg], cpu_T[0]);
4237 tcg_gen_shri_i64(carry_out, cpu_T[0], 32);
4238 break;
4239#endif
4240 default:
4241 /* Otherwise compute the carry-out in two steps. */
76f13133
RH
4242 zero = tcg_const_tl(0);
4243 tcg_gen_add2_tl(cpu_T[0], carry_out,
4244 cpu_T[0], zero,
4245 carry_in, zero);
4246 tcg_gen_add2_tl(cpu_regs[reg], carry_out,
4247 cpu_regs[reg], carry_out,
4248 cpu_T[0], zero);
4249 tcg_temp_free(zero);
cd7f97ca
RH
4250 break;
4251 }
cd7f97ca
RH
4252 set_cc_op(s, end_op);
4253 }
4254 break;
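/* ADCX and ADOX are add-with-carry through CF and OF respectively,
   leaving the other arithmetic flags alone, so two independent carry
   chains can be interleaved.  The "two steps" mentioned above are the
   usual wide-add carry computation; an illustrative scalar sketch of one
   limb of such a chain (assumes <stdint.h>):

   static unsigned adx_step(uint64_t *acc, uint64_t src, unsigned c_in)
   {
       uint64_t t = src + c_in;
       unsigned c1 = (t < src);       -- carry out of src + c_in --
       uint64_t r = *acc + t;
       unsigned c2 = (r < t);         -- carry out of acc + t --
       *acc = r;
       return c1 | c2;                -- at most one can be set --
   }
*/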
4255
4a554890
RH
4256 case 0x1f7: /* shlx Gy, Ey, By */
4257 case 0x2f7: /* sarx Gy, Ey, By */
4258 case 0x3f7: /* shrx Gy, Ey, By */
4259 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4260 || !(s->prefix & PREFIX_VEX)
4261 || s->vex_l != 0) {
4262 goto illegal_op;
4263 }
4264 ot = (s->dflag == 2 ? OT_QUAD : OT_LONG);
4265 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4266 if (ot == OT_QUAD) {
4267 tcg_gen_andi_tl(cpu_T[1], cpu_regs[s->vex_v], 63);
4268 } else {
4269 tcg_gen_andi_tl(cpu_T[1], cpu_regs[s->vex_v], 31);
4270 }
4271 if (b == 0x1f7) {
4272 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4273 } else if (b == 0x2f7) {
4274 if (ot != OT_QUAD) {
4275 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4276 }
4277 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4278 } else {
4279 if (ot != OT_QUAD) {
4280 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4281 }
4282 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4283 }
4284 gen_op_mov_reg_T0(ot, reg);
4285 break;
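/* The BMI2 shifts take their count from the vvvv register, mask it to
   the operand width (31 or 63) and leave the flags untouched.
   Illustrative 64-bit sketches (assumes <stdint.h>; the >> on a negative
   value below relies on the usual arithmetic-shift behaviour of the host
   compiler):

   static uint64_t shlx64(uint64_t v, uint64_t c) { return v << (c & 63); }
   static uint64_t shrx64(uint64_t v, uint64_t c) { return v >> (c & 63); }
   static int64_t  sarx64(int64_t v, uint64_t c)  { return v >> (c & 63); }
*/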
4286
bc4b43dc
RH
4287 case 0x0f3:
4288 case 0x1f3:
4289 case 0x2f3:
4290 case 0x3f3: /* Group 17 */
4291 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4292 || !(s->prefix & PREFIX_VEX)
4293 || s->vex_l != 0) {
4294 goto illegal_op;
4295 }
4296 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4297 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4298
4299 switch (reg & 7) {
4300 case 1: /* blsr By,Ey */
4301 tcg_gen_neg_tl(cpu_T[1], cpu_T[0]);
4302 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4303 gen_op_mov_reg_T0(ot, s->vex_v);
4304 gen_op_update2_cc();
4305 set_cc_op(s, CC_OP_BMILGB + ot);
4306 break;
4307
4308 case 2: /* blsmsk By,Ey */
4309 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4310 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], 1);
4311 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_cc_src);
4312 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4313 set_cc_op(s, CC_OP_BMILGB + ot);
4314 break;
4315
4316 case 3: /* blsi By, Ey */
4317 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4318 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], 1);
4319 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_cc_src);
4320 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4321 set_cc_op(s, CC_OP_BMILGB + ot);
4322 break;
4323
4324 default:
4325 goto illegal_op;
4326 }
4327 break;
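/* For reference, the architectural definitions of the three Group 17
   forms are:
     BLSR    x & (x - 1)   -- clear the lowest set bit
     BLSMSK  x ^ (x - 1)   -- mask up to and including it
     BLSI    x & -x        -- isolate it
   e.g. for x = 0b01101000: BLSR gives 0b01100000, BLSMSK gives
   0b00001111 and BLSI gives 0b00001000. */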
4328
111994ee
RH
4329 default:
4330 goto illegal_op;
4331 }
222a3336 4332 break;
111994ee 4333
222a3336
AZ
4334 case 0x03a:
4335 case 0x13a:
4242b1bd 4336 b = modrm;
0af10c86 4337 modrm = cpu_ldub_code(env, s->pc++);
4242b1bd
AZ
4338 rm = modrm & 7;
4339 reg = ((modrm >> 3) & 7) | rex_r;
4340 mod = (modrm >> 6) & 3;
c045af25
AK
4341 if (b1 >= 2) {
4342 goto illegal_op;
4343 }
4242b1bd 4344
d3eb5eae
BS
4345 sse_fn_eppi = sse_op_table7[b].op[b1];
4346 if (!sse_fn_eppi) {
4242b1bd 4347 goto illegal_op;
c4baa050 4348 }
222a3336
AZ
4349 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4350 goto illegal_op;
4351
d3eb5eae 4352 if (sse_fn_eppi == SSE_SPECIAL) {
222a3336
AZ
4353 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4354 rm = (modrm & 7) | REX_B(s);
4355 if (mod != 3)
0af10c86 4356 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
222a3336 4357 reg = ((modrm >> 3) & 7) | rex_r;
0af10c86 4358 val = cpu_ldub_code(env, s->pc++);
222a3336
AZ
4359 switch (b) {
4360 case 0x14: /* pextrb */
4361 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4362 xmm_regs[reg].XMM_B(val & 15)));
4363 if (mod == 3)
4364 gen_op_mov_reg_T0(ot, rm);
4365 else
4366 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4367 (s->mem_index >> 2) - 1);
4368 break;
4369 case 0x15: /* pextrw */
4370 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4371 xmm_regs[reg].XMM_W(val & 7)));
4372 if (mod == 3)
4373 gen_op_mov_reg_T0(ot, rm);
4374 else
4375 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4376 (s->mem_index >> 2) - 1);
4377 break;
4378 case 0x16:
4379 if (ot == OT_LONG) { /* pextrd */
4380 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4381 offsetof(CPUX86State,
4382 xmm_regs[reg].XMM_L(val & 3)));
a7812ae4 4383 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
222a3336 4384 if (mod == 3)
a7812ae4 4385 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
222a3336 4386 else
a7812ae4 4387 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
222a3336
AZ
4388 (s->mem_index >> 2) - 1);
4389 } else { /* pextrq */
a7812ae4 4390#ifdef TARGET_X86_64
222a3336
AZ
4391 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4392 offsetof(CPUX86State,
4393 xmm_regs[reg].XMM_Q(val & 1)));
4394 if (mod == 3)
4395 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4396 else
4397 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4398 (s->mem_index >> 2) - 1);
a7812ae4
PB
4399#else
4400 goto illegal_op;
4401#endif
222a3336
AZ
4402 }
4403 break;
4404 case 0x17: /* extractps */
4405 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4406 xmm_regs[reg].XMM_L(val & 3)));
4407 if (mod == 3)
4408 gen_op_mov_reg_T0(ot, rm);
4409 else
4410 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4411 (s->mem_index >> 2) - 1);
4412 break;
4413 case 0x20: /* pinsrb */
4414 if (mod == 3)
4415 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4416 else
34c6addd 4417 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
222a3336 4418 (s->mem_index >> 2) - 1);
34c6addd 4419 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
222a3336
AZ
4420 xmm_regs[reg].XMM_B(val & 15)));
4421 break;
4422 case 0x21: /* insertps */
a7812ae4 4423 if (mod == 3) {
222a3336
AZ
4424 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4425 offsetof(CPUX86State,xmm_regs[rm]
4426 .XMM_L((val >> 6) & 3)));
a7812ae4
PB
4427 } else {
4428 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 4429 (s->mem_index >> 2) - 1);
a7812ae4
PB
4430 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4431 }
222a3336
AZ
4432 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4433 offsetof(CPUX86State,xmm_regs[reg]
4434 .XMM_L((val >> 4) & 3)));
4435 if ((val >> 0) & 1)
4436 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4437 cpu_env, offsetof(CPUX86State,
4438 xmm_regs[reg].XMM_L(0)));
4439 if ((val >> 1) & 1)
4440 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4441 cpu_env, offsetof(CPUX86State,
4442 xmm_regs[reg].XMM_L(1)));
4443 if ((val >> 2) & 1)
4444 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4445 cpu_env, offsetof(CPUX86State,
4446 xmm_regs[reg].XMM_L(2)));
4447 if ((val >> 3) & 1)
4448 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4449 cpu_env, offsetof(CPUX86State,
4450 xmm_regs[reg].XMM_L(3)));
4451 break;
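/* The INSERTPS immediate packs three fields, matching the uses of val
   above: bits [7:6] select the source dword (register form), bits [5:4]
   the destination dword, and bits [3:0] are a zero mask applied
   afterwards.  An illustrative sketch on raw dwords (assumes <stdint.h>):

   static void insertps_sketch(uint32_t dst[4], const uint32_t src[4],
                               unsigned imm)
   {
       dst[(imm >> 4) & 3] = src[(imm >> 6) & 3];
       for (int i = 0; i < 4; i++) {
           if (imm & (1u << i)) {
               dst[i] = 0;
           }
       }
   }
*/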
4452 case 0x22:
4453 if (ot == OT_LONG) { /* pinsrd */
4454 if (mod == 3)
a7812ae4 4455 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
222a3336 4456 else
a7812ae4 4457 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 4458 (s->mem_index >> 2) - 1);
a7812ae4 4459 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
222a3336
AZ
4460 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4461 offsetof(CPUX86State,
4462 xmm_regs[reg].XMM_L(val & 3)));
4463 } else { /* pinsrq */
a7812ae4 4464#ifdef TARGET_X86_64
222a3336
AZ
4465 if (mod == 3)
4466 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4467 else
4468 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4469 (s->mem_index >> 2) - 1);
4470 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4471 offsetof(CPUX86State,
4472 xmm_regs[reg].XMM_Q(val & 1)));
a7812ae4
PB
4473#else
4474 goto illegal_op;
4475#endif
222a3336
AZ
4476 }
4477 break;
4478 }
4479 return;
4480 }
4242b1bd
AZ
4481
4482 if (b1) {
4483 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4484 if (mod == 3) {
4485 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4486 } else {
4487 op2_offset = offsetof(CPUX86State,xmm_t0);
0af10c86 4488 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
4489 gen_ldo_env_A0(s->mem_index, op2_offset);
4490 }
4491 } else {
4492 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4493 if (mod == 3) {
4494 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4495 } else {
4496 op2_offset = offsetof(CPUX86State,mmx_t0);
0af10c86 4497 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
4498 gen_ldq_env_A0(s->mem_index, op2_offset);
4499 }
4500 }
0af10c86 4501 val = cpu_ldub_code(env, s->pc++);
4242b1bd 4502
222a3336 4503 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3ca51d07 4504 set_cc_op(s, CC_OP_EFLAGS);
222a3336
AZ
4505
4506 if (s->dflag == 2)
4507 /* The helper must use the full 64-bit gp registers */
4508 val |= 1 << 8;
4509 }
4510
4242b1bd
AZ
4511 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4512 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4513 sse_fn_eppi(cpu_env, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4242b1bd 4514 break;
e2c3c2c5
RH
4515
4516 case 0x33a:
4517 /* Various integer extensions at 0f 3a f[0-f]. */
4518 b = modrm | (b1 << 8);
4519 modrm = cpu_ldub_code(env, s->pc++);
4520 reg = ((modrm >> 3) & 7) | rex_r;
4521
4522 switch (b) {
4523 case 0x3f0: /* rorx Gy,Ey, Ib */
4524 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4525 || !(s->prefix & PREFIX_VEX)
4526 || s->vex_l != 0) {
4527 goto illegal_op;
4528 }
4529 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4530 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4531 b = cpu_ldub_code(env, s->pc++);
4532 if (ot == OT_QUAD) {
4533 tcg_gen_rotri_tl(cpu_T[0], cpu_T[0], b & 63);
4534 } else {
4535 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4536 tcg_gen_rotri_i32(cpu_tmp2_i32, cpu_tmp2_i32, b & 31);
4537 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4538 }
4539 gen_op_mov_reg_T0(ot, reg);
4540 break;
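/* RORX is a rotate-right by an immediate that leaves the flags
   untouched.  Illustrative 32-bit sketch (assumes <stdint.h>; the
   conditional avoids an undefined shift by 32):

   static uint32_t rorx32(uint32_t x, unsigned imm)
   {
       unsigned r = imm & 31;
       return r ? (x >> r) | (x << (32 - r)) : x;
   }
*/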
4541
4542 default:
4543 goto illegal_op;
4544 }
4545 break;
4546
664e0f19
FB
4547 default:
4548 goto illegal_op;
4549 }
4550 } else {
4551 /* generic MMX or SSE operation */
d1e42c5c 4552 switch(b) {
d1e42c5c
FB
4553 case 0x70: /* pshufx insn */
4554 case 0xc6: /* pshufx insn */
4555 case 0xc2: /* compare insns */
4556 s->rip_offset = 1;
4557 break;
4558 default:
4559 break;
664e0f19
FB
4560 }
4561 if (is_xmm) {
4562 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4563 if (mod != 3) {
0af10c86 4564 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 4565 op2_offset = offsetof(CPUX86State,xmm_t0);
480c1cdb 4566 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
664e0f19
FB
4567 b == 0xc2)) {
4568 /* specific case for SSE single instructions */
4569 if (b1 == 2) {
4570 /* 32 bit access */
57fec1fe 4571 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 4572 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
664e0f19
FB
4573 } else {
4574 /* 64 bit access */
8686c490 4575 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
664e0f19
FB
4576 }
4577 } else {
8686c490 4578 gen_ldo_env_A0(s->mem_index, op2_offset);
664e0f19
FB
4579 }
4580 } else {
4581 rm = (modrm & 7) | REX_B(s);
4582 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4583 }
4584 } else {
4585 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4586 if (mod != 3) {
0af10c86 4587 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 4588 op2_offset = offsetof(CPUX86State,mmx_t0);
8686c490 4589 gen_ldq_env_A0(s->mem_index, op2_offset);
664e0f19
FB
4590 } else {
4591 rm = (modrm & 7);
4592 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4593 }
4594 }
4595 switch(b) {
a35f3ec7 4596 case 0x0f: /* 3DNow! data insns */
e771edab
AJ
4597 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4598 goto illegal_op;
0af10c86 4599 val = cpu_ldub_code(env, s->pc++);
d3eb5eae
BS
4600 sse_fn_epp = sse_op_table5[val];
4601 if (!sse_fn_epp) {
a35f3ec7 4602 goto illegal_op;
c4baa050 4603 }
5af45186
FB
4604 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4605 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4606 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
a35f3ec7 4607 break;
664e0f19
FB
4608 case 0x70: /* pshufx insn */
4609 case 0xc6: /* pshufx insn */
0af10c86 4610 val = cpu_ldub_code(env, s->pc++);
5af45186
FB
4611 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4612 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
c4baa050 4613 /* XXX: introduce a new table? */
d3eb5eae 4614 sse_fn_ppi = (SSEFunc_0_ppi)sse_fn_epp;
c4baa050 4615 sse_fn_ppi(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
664e0f19
FB
4616 break;
4617 case 0xc2:
4618 /* compare insns */
0af10c86 4619 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
4620 if (val >= 8)
4621 goto illegal_op;
d3eb5eae 4622 sse_fn_epp = sse_op_table4[val][b1];
c4baa050 4623
5af45186
FB
4624 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4625 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4626 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19 4627 break;
b8b6a50b
FB
4628 case 0xf7:
4629 /* maskmov: we must prepare A0 (the implicit (E/R)DI destination address) */
4630 if (mod != 3)
4631 goto illegal_op;
4632#ifdef TARGET_X86_64
4633 if (s->aflag == 2) {
4634 gen_op_movq_A0_reg(R_EDI);
4635 } else
4636#endif
4637 {
4638 gen_op_movl_A0_reg(R_EDI);
4639 if (s->aflag == 0)
4640 gen_op_andl_A0_ffff();
4641 }
4642 gen_add_A0_ds_seg(s);
4643
4644 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4645 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
c4baa050 4646 /* XXX: introduce a new table? */
d3eb5eae
BS
4647 sse_fn_eppt = (SSEFunc_0_eppt)sse_fn_epp;
4648 sse_fn_eppt(cpu_env, cpu_ptr0, cpu_ptr1, cpu_A0);
b8b6a50b 4649 break;
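/* MASKMOVQ/MASKMOVDQU store only the bytes whose mask byte has its top
   bit set, to the address prepared in A0 above.  Illustrative sketch
   over n bytes (8 for the MMX form, 16 for the SSE form):

   static void maskmov_sketch(uint8_t *dst, const uint8_t *src,
                              const uint8_t *mask, int n)
   {
       for (int i = 0; i < n; i++) {
           if (mask[i] & 0x80) {
               dst[i] = src[i];
           }
       }
   }
*/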
664e0f19 4650 default:
5af45186
FB
4651 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4652 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4653 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
4654 break;
4655 }
4656 if (b == 0x2e || b == 0x2f) {
3ca51d07 4657 set_cc_op(s, CC_OP_EFLAGS);
664e0f19
FB
4658 }
4659 }
4660}
4661
2c0262af
FB
4662/* convert one instruction. s->is_jmp is set if the translation must
4663 be stopped. Return the next pc value */
0af10c86
BS
4664static target_ulong disas_insn(CPUX86State *env, DisasContext *s,
4665 target_ulong pc_start)
2c0262af
FB
4666{
4667 int b, prefixes, aflag, dflag;
4668 int shift, ot;
4669 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
14ce26e7
FB
4670 target_ulong next_eip, tval;
4671 int rex_w, rex_r;
2c0262af 4672
fdefe51c 4673 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
70cff25e 4674 tcg_gen_debug_insn_start(pc_start);
fdefe51c 4675 }
2c0262af
FB
4676 s->pc = pc_start;
4677 prefixes = 0;
4678 aflag = s->code32;
4679 dflag = s->code32;
4680 s->override = -1;
14ce26e7
FB
4681 rex_w = -1;
4682 rex_r = 0;
4683#ifdef TARGET_X86_64
4684 s->rex_x = 0;
4685 s->rex_b = 0;
5fafdf24 4686 x86_64_hregs = 0;
14ce26e7
FB
4687#endif
4688 s->rip_offset = 0; /* for relative ip address */
701ed211
RH
4689 s->vex_l = 0;
4690 s->vex_v = 0;
2c0262af 4691 next_byte:
0af10c86 4692 b = cpu_ldub_code(env, s->pc);
2c0262af 4693 s->pc++;
4a6fd938
RH
4694 /* Collect prefixes. */
4695 switch (b) {
4696 case 0xf3:
4697 prefixes |= PREFIX_REPZ;
4698 goto next_byte;
4699 case 0xf2:
4700 prefixes |= PREFIX_REPNZ;
4701 goto next_byte;
4702 case 0xf0:
4703 prefixes |= PREFIX_LOCK;
4704 goto next_byte;
4705 case 0x2e:
4706 s->override = R_CS;
4707 goto next_byte;
4708 case 0x36:
4709 s->override = R_SS;
4710 goto next_byte;
4711 case 0x3e:
4712 s->override = R_DS;
4713 goto next_byte;
4714 case 0x26:
4715 s->override = R_ES;
4716 goto next_byte;
4717 case 0x64:
4718 s->override = R_FS;
4719 goto next_byte;
4720 case 0x65:
4721 s->override = R_GS;
4722 goto next_byte;
4723 case 0x66:
4724 prefixes |= PREFIX_DATA;
4725 goto next_byte;
4726 case 0x67:
4727 prefixes |= PREFIX_ADR;
4728 goto next_byte;
14ce26e7 4729#ifdef TARGET_X86_64
4a6fd938
RH
4730 case 0x40 ... 0x4f:
4731 if (CODE64(s)) {
14ce26e7
FB
4732 /* REX prefix */
4733 rex_w = (b >> 3) & 1;
4734 rex_r = (b & 0x4) << 1;
4735 s->rex_x = (b & 0x2) << 2;
4736 REX_B(s) = (b & 0x1) << 3;
4737 x86_64_hregs = 1; /* select uniform byte register addressing */
4738 goto next_byte;
4739 }
4a6fd938
RH
4740 break;
4741#endif
701ed211
RH
4742 case 0xc5: /* 2-byte VEX */
4743 case 0xc4: /* 3-byte VEX */
4744 /* VEX prefixes cannot be used except in 32-bit or 64-bit code.
4745 Otherwise the instruction is LES or LDS. */
4746 if (s->code32 && !s->vm86) {
4747 static const int pp_prefix[4] = {
4748 0, PREFIX_DATA, PREFIX_REPZ, PREFIX_REPNZ
4749 };
4750 int vex3, vex2 = cpu_ldub_code(env, s->pc);
4751
4752 if (!CODE64(s) && (vex2 & 0xc0) != 0xc0) {
4753 /* 4.1.4.6: In 32-bit mode, bits [7:6] must be 11b,
4754 otherwise the instruction is LES or LDS. */
4755 break;
4756 }
4757 s->pc++;
4758
085d8134 4759 /* 4.1.1-4.1.3: No preceding lock, 66, f2, f3, or rex prefixes. */
701ed211
RH
4760 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ
4761 | PREFIX_LOCK | PREFIX_DATA)) {
4762 goto illegal_op;
4763 }
4764#ifdef TARGET_X86_64
4765 if (x86_64_hregs) {
4766 goto illegal_op;
4767 }
4768#endif
4769 rex_r = (~vex2 >> 4) & 8;
4770 if (b == 0xc5) {
4771 vex3 = vex2;
4772 b = cpu_ldub_code(env, s->pc++);
4773 } else {
4774#ifdef TARGET_X86_64
4775 s->rex_x = (~vex2 >> 3) & 8;
4776 s->rex_b = (~vex2 >> 2) & 8;
4777#endif
4778 vex3 = cpu_ldub_code(env, s->pc++);
4779 rex_w = (vex3 >> 7) & 1;
4780 switch (vex2 & 0x1f) {
4781 case 0x01: /* Implied 0f leading opcode bytes. */
4782 b = cpu_ldub_code(env, s->pc++) | 0x100;
4783 break;
4784 case 0x02: /* Implied 0f 38 leading opcode bytes. */
4785 b = 0x138;
4786 break;
4787 case 0x03: /* Implied 0f 3a leading opcode bytes. */
4788 b = 0x13a;
4789 break;
4790 default: /* Reserved for future use. */
4791 goto illegal_op;
4792 }
4793 }
4794 s->vex_v = (~vex3 >> 3) & 0xf;
4795 s->vex_l = (vex3 >> 2) & 1;
4796 prefixes |= pp_prefix[vex3 & 3] | PREFIX_VEX;
4797 }
4798 break;
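/* For reference, the VEX fields unpacked above are laid out as follows,
   with R, X, B and vvvv stored inverted in the encoding.  Illustrative
   decode of the 3-byte (0xc4) form, using plain 0/1 bits rather than the
   shifted REX-style values the translator keeps:

   static void vex3_fields(unsigned byte1, unsigned byte2,
                           unsigned *r, unsigned *x, unsigned *b,
                           unsigned *mmmmm, unsigned *w,
                           unsigned *vvvv, unsigned *l, unsigned *pp)
   {
       *r     = (~byte1 >> 7) & 1;
       *x     = (~byte1 >> 6) & 1;
       *b     = (~byte1 >> 5) & 1;
       *mmmmm = byte1 & 0x1f;      -- 1 = 0f, 2 = 0f 38, 3 = 0f 3a --
       *w     = (byte2 >> 7) & 1;
       *vvvv  = (~byte2 >> 3) & 0xf;
       *l     = (byte2 >> 2) & 1;
       *pp    = byte2 & 3;         -- 0 = none, 1 = 66, 2 = f3, 3 = f2 --
   }
*/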
4a6fd938
RH
4799 }
4800
4801 /* Post-process prefixes. */
4802 if (prefixes & PREFIX_DATA) {
4803 dflag ^= 1;
4804 }
4805 if (prefixes & PREFIX_ADR) {
4806 aflag ^= 1;
4807 }
4808#ifdef TARGET_X86_64
4809 if (CODE64(s)) {
14ce26e7
FB
4810 if (rex_w == 1) {
4811 /* 0x66 is ignored if rex.w is set */
4812 dflag = 2;
14ce26e7 4813 }
4a6fd938 4814 if (!(prefixes & PREFIX_ADR)) {
14ce26e7 4815 aflag = 2;
14ce26e7 4816 }
2c0262af 4817 }
4a6fd938 4818#endif
2c0262af 4819
2c0262af
FB
4820 s->prefix = prefixes;
4821 s->aflag = aflag;
4822 s->dflag = dflag;
4823
4824 /* lock generation */
4825 if (prefixes & PREFIX_LOCK)
a7812ae4 4826 gen_helper_lock();
2c0262af
FB
4827
4828 /* now check op code */
4829 reswitch:
4830 switch(b) {
4831 case 0x0f:
4832 /**************************/
4833 /* extended op code */
0af10c86 4834 b = cpu_ldub_code(env, s->pc++) | 0x100;
2c0262af 4835 goto reswitch;
3b46e624 4836
2c0262af
FB
4837 /**************************/
4838 /* arith & logic */
4839 case 0x00 ... 0x05:
4840 case 0x08 ... 0x0d:
4841 case 0x10 ... 0x15:
4842 case 0x18 ... 0x1d:
4843 case 0x20 ... 0x25:
4844 case 0x28 ... 0x2d:
4845 case 0x30 ... 0x35:
4846 case 0x38 ... 0x3d:
4847 {
4848 int op, f, val;
4849 op = (b >> 3) & 7;
4850 f = (b >> 1) & 3;
4851
4852 if ((b & 1) == 0)
4853 ot = OT_BYTE;
4854 else
14ce26e7 4855 ot = dflag + OT_WORD;
3b46e624 4856
2c0262af
FB
4857 switch(f) {
4858 case 0: /* OP Ev, Gv */
0af10c86 4859 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 4860 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 4861 mod = (modrm >> 6) & 3;
14ce26e7 4862 rm = (modrm & 7) | REX_B(s);
2c0262af 4863 if (mod != 3) {
0af10c86 4864 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
4865 opreg = OR_TMP0;
4866 } else if (op == OP_XORL && rm == reg) {
4867 xor_zero:
4868 /* xor reg, reg optimisation */
436ff2d2 4869 set_cc_op(s, CC_OP_CLR);
2c0262af 4870 gen_op_movl_T0_0();
57fec1fe 4871 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
4872 break;
4873 } else {
4874 opreg = rm;
4875 }
57fec1fe 4876 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af
FB
4877 gen_op(s, op, ot, opreg);
4878 break;
4879 case 1: /* OP Gv, Ev */
0af10c86 4880 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4881 mod = (modrm >> 6) & 3;
14ce26e7
FB
4882 reg = ((modrm >> 3) & 7) | rex_r;
4883 rm = (modrm & 7) | REX_B(s);
2c0262af 4884 if (mod != 3) {
0af10c86 4885 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 4886 gen_op_ld_T1_A0(ot + s->mem_index);
2c0262af
FB
4887 } else if (op == OP_XORL && rm == reg) {
4888 goto xor_zero;
4889 } else {
57fec1fe 4890 gen_op_mov_TN_reg(ot, 1, rm);
2c0262af
FB
4891 }
4892 gen_op(s, op, ot, reg);
4893 break;
4894 case 2: /* OP A, Iv */
0af10c86 4895 val = insn_get(env, s, ot);
2c0262af
FB
4896 gen_op_movl_T1_im(val);
4897 gen_op(s, op, ot, OR_EAX);
4898 break;
4899 }
4900 }
4901 break;
4902
ec9d6075
FB
4903 case 0x82:
4904 if (CODE64(s))
4905 goto illegal_op;
2c0262af
FB
4906 case 0x80: /* GRP1 */
4907 case 0x81:
4908 case 0x83:
4909 {
4910 int val;
4911
4912 if ((b & 1) == 0)
4913 ot = OT_BYTE;
4914 else
14ce26e7 4915 ot = dflag + OT_WORD;
3b46e624 4916
0af10c86 4917 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4918 mod = (modrm >> 6) & 3;
14ce26e7 4919 rm = (modrm & 7) | REX_B(s);
2c0262af 4920 op = (modrm >> 3) & 7;
3b46e624 4921
2c0262af 4922 if (mod != 3) {
14ce26e7
FB
4923 if (b == 0x83)
4924 s->rip_offset = 1;
4925 else
4926 s->rip_offset = insn_const_size(ot);
0af10c86 4927 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
4928 opreg = OR_TMP0;
4929 } else {
14ce26e7 4930 opreg = rm;
2c0262af
FB
4931 }
4932
4933 switch(b) {
4934 default:
4935 case 0x80:
4936 case 0x81:
d64477af 4937 case 0x82:
0af10c86 4938 val = insn_get(env, s, ot);
2c0262af
FB
4939 break;
4940 case 0x83:
0af10c86 4941 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
4942 break;
4943 }
4944 gen_op_movl_T1_im(val);
4945 gen_op(s, op, ot, opreg);
4946 }
4947 break;
4948
4949 /**************************/
4950 /* inc, dec, and other misc arith */
4951 case 0x40 ... 0x47: /* inc Gv */
4952 ot = dflag ? OT_LONG : OT_WORD;
4953 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4954 break;
4955 case 0x48 ... 0x4f: /* dec Gv */
4956 ot = dflag ? OT_LONG : OT_WORD;
4957 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4958 break;
4959 case 0xf6: /* GRP3 */
4960 case 0xf7:
4961 if ((b & 1) == 0)
4962 ot = OT_BYTE;
4963 else
14ce26e7 4964 ot = dflag + OT_WORD;
2c0262af 4965
0af10c86 4966 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4967 mod = (modrm >> 6) & 3;
14ce26e7 4968 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
4969 op = (modrm >> 3) & 7;
4970 if (mod != 3) {
14ce26e7
FB
4971 if (op == 0)
4972 s->rip_offset = insn_const_size(ot);
0af10c86 4973 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 4974 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 4975 } else {
57fec1fe 4976 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
4977 }
4978
4979 switch(op) {
4980 case 0: /* test */
0af10c86 4981 val = insn_get(env, s, ot);
2c0262af
FB
4982 gen_op_movl_T1_im(val);
4983 gen_op_testl_T0_T1_cc();
3ca51d07 4984 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af
FB
4985 break;
4986 case 2: /* not */
b6abf97d 4987 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
2c0262af 4988 if (mod != 3) {
57fec1fe 4989 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 4990 } else {
57fec1fe 4991 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
4992 }
4993 break;
4994 case 3: /* neg */
b6abf97d 4995 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
2c0262af 4996 if (mod != 3) {
57fec1fe 4997 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 4998 } else {
57fec1fe 4999 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
5000 }
5001 gen_op_update_neg_cc();
3ca51d07 5002 set_cc_op(s, CC_OP_SUBB + ot);
2c0262af
FB
5003 break;
5004 case 4: /* mul */
5005 switch(ot) {
5006 case OT_BYTE:
0211e5af
FB
5007 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5008 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5009 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
5010 /* XXX: use 32 bit mul which could be faster */
5011 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5012 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5013 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5014 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3ca51d07 5015 set_cc_op(s, CC_OP_MULB);
2c0262af
FB
5016 break;
5017 case OT_WORD:
0211e5af
FB
5018 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5019 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5020 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
5021 /* XXX: use 32 bit mul which could be faster */
5022 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5023 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5024 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5025 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5026 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5027 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3ca51d07 5028 set_cc_op(s, CC_OP_MULW);
2c0262af
FB
5029 break;
5030 default:
5031 case OT_LONG:
a4bcea3d
RH
5032 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5033 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
5034 tcg_gen_mulu2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5035 cpu_tmp2_i32, cpu_tmp3_i32);
5036 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], cpu_tmp2_i32);
5037 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
5038 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5039 tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5040 set_cc_op(s, CC_OP_MULL);
2c0262af 5041 break;
14ce26e7
FB
5042#ifdef TARGET_X86_64
5043 case OT_QUAD:
a4bcea3d
RH
5044 tcg_gen_mulu2_i64(cpu_regs[R_EAX], cpu_regs[R_EDX],
5045 cpu_T[0], cpu_regs[R_EAX]);
5046 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5047 tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5048 set_cc_op(s, CC_OP_MULQ);
14ce26e7
FB
5049 break;
5050#endif
2c0262af 5051 }
2c0262af
FB
5052 break;
5053 case 5: /* imul */
5054 switch(ot) {
5055 case OT_BYTE:
0211e5af
FB
5056 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5057 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5058 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5059 /* XXX: use 32 bit mul which could be faster */
5060 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5061 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5062 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5063 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5064 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3ca51d07 5065 set_cc_op(s, CC_OP_MULB);
2c0262af
FB
5066 break;
5067 case OT_WORD:
0211e5af
FB
5068 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5069 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5070 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5071 /* XXX: use 32 bit mul which could be faster */
5072 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5073 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5074 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5075 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5076 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5077 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5078 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3ca51d07 5079 set_cc_op(s, CC_OP_MULW);
2c0262af
FB
5080 break;
5081 default:
5082 case OT_LONG:
a4bcea3d
RH
5083 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5084 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
5085 tcg_gen_muls2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5086 cpu_tmp2_i32, cpu_tmp3_i32);
5087 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], cpu_tmp2_i32);
5088 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
5089 tcg_gen_sari_i32(cpu_tmp2_i32, cpu_tmp2_i32, 31);
5090 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5091 tcg_gen_sub_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
5092 tcg_gen_extu_i32_tl(cpu_cc_src, cpu_tmp2_i32);
3ca51d07 5093 set_cc_op(s, CC_OP_MULL);
2c0262af 5094 break;
14ce26e7
FB
5095#ifdef TARGET_X86_64
5096 case OT_QUAD:
a4bcea3d
RH
5097 tcg_gen_muls2_i64(cpu_regs[R_EAX], cpu_regs[R_EDX],
5098 cpu_T[0], cpu_regs[R_EAX]);
5099 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5100 tcg_gen_sari_tl(cpu_cc_src, cpu_regs[R_EAX], 63);
5101 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5102 set_cc_op(s, CC_OP_MULQ);
14ce26e7
FB
5103 break;
5104#endif
2c0262af 5105 }
2c0262af
FB
5106 break;
5107 case 6: /* div */
5108 switch(ot) {
5109 case OT_BYTE:
14ce26e7 5110 gen_jmp_im(pc_start - s->cs_base);
7923057b 5111 gen_helper_divb_AL(cpu_env, cpu_T[0]);
2c0262af
FB
5112 break;
5113 case OT_WORD:
14ce26e7 5114 gen_jmp_im(pc_start - s->cs_base);
7923057b 5115 gen_helper_divw_AX(cpu_env, cpu_T[0]);
2c0262af
FB
5116 break;
5117 default:
5118 case OT_LONG:
14ce26e7 5119 gen_jmp_im(pc_start - s->cs_base);
7923057b 5120 gen_helper_divl_EAX(cpu_env, cpu_T[0]);
14ce26e7
FB
5121 break;
5122#ifdef TARGET_X86_64
5123 case OT_QUAD:
5124 gen_jmp_im(pc_start - s->cs_base);
7923057b 5125 gen_helper_divq_EAX(cpu_env, cpu_T[0]);
2c0262af 5126 break;
14ce26e7 5127#endif
2c0262af
FB
5128 }
5129 break;
5130 case 7: /* idiv */
5131 switch(ot) {
5132 case OT_BYTE:
14ce26e7 5133 gen_jmp_im(pc_start - s->cs_base);
7923057b 5134 gen_helper_idivb_AL(cpu_env, cpu_T[0]);
2c0262af
FB
5135 break;
5136 case OT_WORD:
14ce26e7 5137 gen_jmp_im(pc_start - s->cs_base);
7923057b 5138 gen_helper_idivw_AX(cpu_env, cpu_T[0]);
2c0262af
FB
5139 break;
5140 default:
5141 case OT_LONG:
14ce26e7 5142 gen_jmp_im(pc_start - s->cs_base);
7923057b 5143 gen_helper_idivl_EAX(cpu_env, cpu_T[0]);
14ce26e7
FB
5144 break;
5145#ifdef TARGET_X86_64
5146 case OT_QUAD:
5147 gen_jmp_im(pc_start - s->cs_base);
7923057b 5148 gen_helper_idivq_EAX(cpu_env, cpu_T[0]);
2c0262af 5149 break;
14ce26e7 5150#endif
2c0262af
FB
5151 }
5152 break;
5153 default:
5154 goto illegal_op;
5155 }
5156 break;
5157
5158 case 0xfe: /* GRP4 */
5159 case 0xff: /* GRP5 */
5160 if ((b & 1) == 0)
5161 ot = OT_BYTE;
5162 else
14ce26e7 5163 ot = dflag + OT_WORD;
2c0262af 5164
0af10c86 5165 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5166 mod = (modrm >> 6) & 3;
14ce26e7 5167 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
5168 op = (modrm >> 3) & 7;
5169 if (op >= 2 && b == 0xfe) {
5170 goto illegal_op;
5171 }
14ce26e7 5172 if (CODE64(s)) {
aba9d61e 5173 if (op == 2 || op == 4) {
14ce26e7
FB
5174 /* operand size for jumps is 64 bit */
5175 ot = OT_QUAD;
aba9d61e 5176 } else if (op == 3 || op == 5) {
41b1e61f 5177 ot = dflag ? OT_LONG + (rex_w == 1) : OT_WORD;
14ce26e7
FB
5178 } else if (op == 6) {
5179 /* default push size is 64 bit */
5180 ot = dflag ? OT_QUAD : OT_WORD;
5181 }
5182 }
2c0262af 5183 if (mod != 3) {
0af10c86 5184 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5185 if (op >= 2 && op != 3 && op != 5)
57fec1fe 5186 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 5187 } else {
57fec1fe 5188 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
5189 }
5190
5191 switch(op) {
5192 case 0: /* inc Ev */
5193 if (mod != 3)
5194 opreg = OR_TMP0;
5195 else
5196 opreg = rm;
5197 gen_inc(s, ot, opreg, 1);
5198 break;
5199 case 1: /* dec Ev */
5200 if (mod != 3)
5201 opreg = OR_TMP0;
5202 else
5203 opreg = rm;
5204 gen_inc(s, ot, opreg, -1);
5205 break;
5206 case 2: /* call Ev */
4f31916f 5207 /* XXX: optimize if memory (no 'and' is necessary) */
2c0262af
FB
5208 if (s->dflag == 0)
5209 gen_op_andl_T0_ffff();
2c0262af 5210 next_eip = s->pc - s->cs_base;
1ef38687 5211 gen_movtl_T1_im(next_eip);
4f31916f
FB
5212 gen_push_T1(s);
5213 gen_op_jmp_T0();
2c0262af
FB
5214 gen_eob(s);
5215 break;
61382a50 5216 case 3: /* lcall Ev */
57fec1fe 5217 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5218 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
57fec1fe 5219 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5220 do_lcall:
5221 if (s->pe && !s->vm86) {
773cdfcc 5222 gen_update_cc_op(s);
14ce26e7 5223 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 5224 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2
BS
5225 gen_helper_lcall_protected(cpu_env, cpu_tmp2_i32, cpu_T[1],
5226 tcg_const_i32(dflag),
a7812ae4 5227 tcg_const_i32(s->pc - pc_start));
2c0262af 5228 } else {
b6abf97d 5229 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2
BS
5230 gen_helper_lcall_real(cpu_env, cpu_tmp2_i32, cpu_T[1],
5231 tcg_const_i32(dflag),
a7812ae4 5232 tcg_const_i32(s->pc - s->cs_base));
2c0262af
FB
5233 }
5234 gen_eob(s);
5235 break;
5236 case 4: /* jmp Ev */
5237 if (s->dflag == 0)
5238 gen_op_andl_T0_ffff();
5239 gen_op_jmp_T0();
5240 gen_eob(s);
5241 break;
5242 case 5: /* ljmp Ev */
57fec1fe 5243 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5244 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
57fec1fe 5245 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5246 do_ljmp:
5247 if (s->pe && !s->vm86) {
773cdfcc 5248 gen_update_cc_op(s);
14ce26e7 5249 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 5250 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 5251 gen_helper_ljmp_protected(cpu_env, cpu_tmp2_i32, cpu_T[1],
a7812ae4 5252 tcg_const_i32(s->pc - pc_start));
2c0262af 5253 } else {
3bd7da9e 5254 gen_op_movl_seg_T0_vm(R_CS);
2c0262af
FB
5255 gen_op_movl_T0_T1();
5256 gen_op_jmp_T0();
5257 }
5258 gen_eob(s);
5259 break;
5260 case 6: /* push Ev */
5261 gen_push_T0(s);
5262 break;
5263 default:
5264 goto illegal_op;
5265 }
5266 break;
5267
5268 case 0x84: /* test Ev, Gv */
5fafdf24 5269 case 0x85:
2c0262af
FB
5270 if ((b & 1) == 0)
5271 ot = OT_BYTE;
5272 else
14ce26e7 5273 ot = dflag + OT_WORD;
2c0262af 5274
0af10c86 5275 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5276 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5277
0af10c86 5278 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
57fec1fe 5279 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af 5280 gen_op_testl_T0_T1_cc();
3ca51d07 5281 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af 5282 break;
3b46e624 5283
2c0262af
FB
5284 case 0xa8: /* test eAX, Iv */
5285 case 0xa9:
5286 if ((b & 1) == 0)
5287 ot = OT_BYTE;
5288 else
14ce26e7 5289 ot = dflag + OT_WORD;
0af10c86 5290 val = insn_get(env, s, ot);
2c0262af 5291
57fec1fe 5292 gen_op_mov_TN_reg(ot, 0, OR_EAX);
2c0262af
FB
5293 gen_op_movl_T1_im(val);
5294 gen_op_testl_T0_T1_cc();
3ca51d07 5295 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af 5296 break;
3b46e624 5297
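    /* cbw/cwde/cdqe sign-extend within rAX; cwd/cdq/cqo below replicate
       the sign bit of rAX into rDX with an arithmetic shift by
       15/31/63. */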
2c0262af 5298 case 0x98: /* CWDE/CBW */
14ce26e7
FB
5299#ifdef TARGET_X86_64
5300 if (dflag == 2) {
e108dd01
FB
5301 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5302 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5303 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
14ce26e7
FB
5304 } else
5305#endif
e108dd01
FB
5306 if (dflag == 1) {
5307 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5308 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5309 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5310 } else {
5311 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5312 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5313 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5314 }
2c0262af
FB
5315 break;
5316 case 0x99: /* CDQ/CWD */
14ce26e7
FB
5317#ifdef TARGET_X86_64
5318 if (dflag == 2) {
e108dd01
FB
5319 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5320 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5321 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
14ce26e7
FB
5322 } else
5323#endif
e108dd01
FB
5324 if (dflag == 1) {
5325 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5326 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5327 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5328 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5329 } else {
5330 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5331 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5332 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5333 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5334 }
2c0262af
FB
5335 break;
5336 case 0x1af: /* imul Gv, Ev */
5337 case 0x69: /* imul Gv, Ev, I */
5338 case 0x6b:
14ce26e7 5339 ot = dflag + OT_WORD;
0af10c86 5340 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
5341 reg = ((modrm >> 3) & 7) | rex_r;
5342 if (b == 0x69)
5343 s->rip_offset = insn_const_size(ot);
5344 else if (b == 0x6b)
5345 s->rip_offset = 1;
0af10c86 5346 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
2c0262af 5347 if (b == 0x69) {
0af10c86 5348 val = insn_get(env, s, ot);
2c0262af
FB
5349 gen_op_movl_T1_im(val);
5350 } else if (b == 0x6b) {
0af10c86 5351 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
5352 gen_op_movl_T1_im(val);
5353 } else {
57fec1fe 5354 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af 5355 }
a4bcea3d 5356 switch (ot) {
0211e5af 5357#ifdef TARGET_X86_64
a4bcea3d
RH
5358 case OT_QUAD:
5359 tcg_gen_muls2_i64(cpu_regs[reg], cpu_T[1], cpu_T[0], cpu_T[1]);
5360 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
5361 tcg_gen_sari_tl(cpu_cc_src, cpu_cc_dst, 63);
5362 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, cpu_T[1]);
5363 break;
0211e5af 5364#endif
a4bcea3d
RH
5365 case OT_LONG:
5366 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5367 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5368 tcg_gen_muls2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5369 cpu_tmp2_i32, cpu_tmp3_i32);
5370 tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp2_i32);
5371 tcg_gen_sari_i32(cpu_tmp2_i32, cpu_tmp2_i32, 31);
5372 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
5373 tcg_gen_sub_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
5374 tcg_gen_extu_i32_tl(cpu_cc_src, cpu_tmp2_i32);
5375 break;
5376 default:
0211e5af
FB
5377 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5378 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5379 /* XXX: use 32 bit mul which could be faster */
5380 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5381 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5382 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5383 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
a4bcea3d
RH
5384 gen_op_mov_reg_T0(ot, reg);
5385 break;
2c0262af 5386 }
3ca51d07 5387 set_cc_op(s, CC_OP_MULB + ot);
2c0262af
FB
5388 break;
5389 case 0x1c0:
5390 case 0x1c1: /* xadd Ev, Gv */
5391 if ((b & 1) == 0)
5392 ot = OT_BYTE;
5393 else
14ce26e7 5394 ot = dflag + OT_WORD;
0af10c86 5395 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5396 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5397 mod = (modrm >> 6) & 3;
5398 if (mod == 3) {
14ce26e7 5399 rm = (modrm & 7) | REX_B(s);
57fec1fe
FB
5400 gen_op_mov_TN_reg(ot, 0, reg);
5401 gen_op_mov_TN_reg(ot, 1, rm);
2c0262af 5402 gen_op_addl_T0_T1();
57fec1fe
FB
5403 gen_op_mov_reg_T1(ot, reg);
5404 gen_op_mov_reg_T0(ot, rm);
2c0262af 5405 } else {
0af10c86 5406 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe
FB
5407 gen_op_mov_TN_reg(ot, 0, reg);
5408 gen_op_ld_T1_A0(ot + s->mem_index);
2c0262af 5409 gen_op_addl_T0_T1();
57fec1fe
FB
5410 gen_op_st_T0_A0(ot + s->mem_index);
5411 gen_op_mov_reg_T1(ot, reg);
2c0262af
FB
5412 }
5413 gen_op_update2_cc();
3ca51d07 5414 set_cc_op(s, CC_OP_ADDB + ot);
2c0262af
FB
5415 break;
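    /* cmpxchg below is open-coded with a conditional branch.  The
       operands are kept in tcg_temp_local_new() temporaries because,
       unlike ordinary temporaries, locals keep their values across the
       brcond/label boundaries used here. */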
5416 case 0x1b0:
5417 case 0x1b1: /* cmpxchg Ev, Gv */
cad3a37d 5418 {
1130328e 5419 int label1, label2;
1e4840bf 5420 TCGv t0, t1, t2, a0;
cad3a37d
FB
5421
5422 if ((b & 1) == 0)
5423 ot = OT_BYTE;
5424 else
5425 ot = dflag + OT_WORD;
0af10c86 5426 modrm = cpu_ldub_code(env, s->pc++);
cad3a37d
FB
5427 reg = ((modrm >> 3) & 7) | rex_r;
5428 mod = (modrm >> 6) & 3;
a7812ae4
PB
5429 t0 = tcg_temp_local_new();
5430 t1 = tcg_temp_local_new();
5431 t2 = tcg_temp_local_new();
5432 a0 = tcg_temp_local_new();
1e4840bf 5433 gen_op_mov_v_reg(ot, t1, reg);
cad3a37d
FB
5434 if (mod == 3) {
5435 rm = (modrm & 7) | REX_B(s);
1e4840bf 5436 gen_op_mov_v_reg(ot, t0, rm);
cad3a37d 5437 } else {
0af10c86 5438 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
1e4840bf
FB
5439 tcg_gen_mov_tl(a0, cpu_A0);
5440 gen_op_ld_v(ot + s->mem_index, t0, a0);
cad3a37d
FB
5441 rm = 0; /* avoid warning */
5442 }
5443 label1 = gen_new_label();
a3251186
RH
5444 tcg_gen_mov_tl(t2, cpu_regs[R_EAX]);
5445 gen_extu(ot, t0);
1e4840bf 5446 gen_extu(ot, t2);
a3251186 5447 tcg_gen_brcond_tl(TCG_COND_EQ, t2, t0, label1);
f7e80adf 5448 label2 = gen_new_label();
cad3a37d 5449 if (mod == 3) {
1e4840bf 5450 gen_op_mov_reg_v(ot, R_EAX, t0);
1130328e
FB
5451 tcg_gen_br(label2);
5452 gen_set_label(label1);
1e4840bf 5453 gen_op_mov_reg_v(ot, rm, t1);
cad3a37d 5454 } else {
f7e80adf
AG
5455 /* perform a no-op store cycle like the physical CPU; it must
5456 happen before the accumulator changes so that the instruction
5457 stays idempotent if the store faults and is restarted */
5458 gen_op_st_v(ot + s->mem_index, t0, a0);
1e4840bf 5459 gen_op_mov_reg_v(ot, R_EAX, t0);
f7e80adf 5460 tcg_gen_br(label2);
1130328e 5461 gen_set_label(label1);
1e4840bf 5462 gen_op_st_v(ot + s->mem_index, t1, a0);
cad3a37d 5463 }
f7e80adf 5464 gen_set_label(label2);
1e4840bf 5465 tcg_gen_mov_tl(cpu_cc_src, t0);
a3251186
RH
5466 tcg_gen_mov_tl(cpu_cc_srcT, t2);
5467 tcg_gen_sub_tl(cpu_cc_dst, t2, t0);
3ca51d07 5468 set_cc_op(s, CC_OP_SUBB + ot);
1e4840bf
FB
5469 tcg_temp_free(t0);
5470 tcg_temp_free(t1);
5471 tcg_temp_free(t2);
5472 tcg_temp_free(a0);
2c0262af 5473 }
2c0262af
FB
5474 break;
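    /* cmpxchg8b/cmpxchg16b are done entirely in helpers; dflag == 2
       selects the 16-byte form, which additionally requires
       CPUID_EXT_CX16, while the 8-byte form only needs CPUID_CX8. */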
5475 case 0x1c7: /* cmpxchg8b */
0af10c86 5476 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5477 mod = (modrm >> 6) & 3;
71c3558e 5478 if ((mod == 3) || ((modrm & 0x38) != 0x8))
2c0262af 5479 goto illegal_op;
1b9d9ebb
FB
5480#ifdef TARGET_X86_64
5481 if (dflag == 2) {
5482 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5483 goto illegal_op;
5484 gen_jmp_im(pc_start - s->cs_base);
773cdfcc 5485 gen_update_cc_op(s);
0af10c86 5486 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
92fc4b58 5487 gen_helper_cmpxchg16b(cpu_env, cpu_A0);
1b9d9ebb
FB
5488 } else
5489#endif
5490 {
5491 if (!(s->cpuid_features & CPUID_CX8))
5492 goto illegal_op;
5493 gen_jmp_im(pc_start - s->cs_base);
773cdfcc 5494 gen_update_cc_op(s);
0af10c86 5495 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
92fc4b58 5496 gen_helper_cmpxchg8b(cpu_env, cpu_A0);
1b9d9ebb 5497 }
3ca51d07 5498 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 5499 break;
3b46e624 5500
2c0262af
FB
5501 /**************************/
5502 /* push/pop */
5503 case 0x50 ... 0x57: /* push */
57fec1fe 5504 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
2c0262af
FB
5505 gen_push_T0(s);
5506 break;
5507 case 0x58 ... 0x5f: /* pop */
14ce26e7
FB
5508 if (CODE64(s)) {
5509 ot = dflag ? OT_QUAD : OT_WORD;
5510 } else {
5511 ot = dflag + OT_WORD;
5512 }
2c0262af 5513 gen_pop_T0(s);
77729c24 5514 /* NOTE: order is important for pop %sp */
2c0262af 5515 gen_pop_update(s);
57fec1fe 5516 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
2c0262af
FB
5517 break;
5518 case 0x60: /* pusha */
14ce26e7
FB
5519 if (CODE64(s))
5520 goto illegal_op;
2c0262af
FB
5521 gen_pusha(s);
5522 break;
5523 case 0x61: /* popa */
14ce26e7
FB
5524 if (CODE64(s))
5525 goto illegal_op;
2c0262af
FB
5526 gen_popa(s);
5527 break;
5528 case 0x68: /* push Iv */
5529 case 0x6a:
14ce26e7
FB
5530 if (CODE64(s)) {
5531 ot = dflag ? OT_QUAD : OT_WORD;
5532 } else {
5533 ot = dflag + OT_WORD;
5534 }
2c0262af 5535 if (b == 0x68)
0af10c86 5536 val = insn_get(env, s, ot);
2c0262af 5537 else
0af10c86 5538 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
5539 gen_op_movl_T0_im(val);
5540 gen_push_T0(s);
5541 break;
5542 case 0x8f: /* pop Ev */
14ce26e7
FB
5543 if (CODE64(s)) {
5544 ot = dflag ? OT_QUAD : OT_WORD;
5545 } else {
5546 ot = dflag + OT_WORD;
5547 }
0af10c86 5548 modrm = cpu_ldub_code(env, s->pc++);
77729c24 5549 mod = (modrm >> 6) & 3;
2c0262af 5550 gen_pop_T0(s);
77729c24
FB
5551 if (mod == 3) {
5552 /* NOTE: order is important for pop %sp */
5553 gen_pop_update(s);
14ce26e7 5554 rm = (modrm & 7) | REX_B(s);
57fec1fe 5555 gen_op_mov_reg_T0(ot, rm);
77729c24
FB
5556 } else {
5557 /* NOTE: order is important too for MMU exceptions */
14ce26e7 5558 s->popl_esp_hack = 1 << ot;
0af10c86 5559 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
77729c24
FB
5560 s->popl_esp_hack = 0;
5561 gen_pop_update(s);
5562 }
2c0262af
FB
5563 break;
5564 case 0xc8: /* enter */
5565 {
5566 int level;
0af10c86 5567 val = cpu_lduw_code(env, s->pc);
2c0262af 5568 s->pc += 2;
0af10c86 5569 level = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5570 gen_enter(s, val, level);
5571 }
5572 break;
5573 case 0xc9: /* leave */
5574 /* XXX: exception not precise (ESP is updated before potential exception) */
14ce26e7 5575 if (CODE64(s)) {
57fec1fe
FB
5576 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5577 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
14ce26e7 5578 } else if (s->ss32) {
57fec1fe
FB
5579 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5580 gen_op_mov_reg_T0(OT_LONG, R_ESP);
2c0262af 5581 } else {
57fec1fe
FB
5582 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5583 gen_op_mov_reg_T0(OT_WORD, R_ESP);
2c0262af
FB
5584 }
5585 gen_pop_T0(s);
14ce26e7
FB
5586 if (CODE64(s)) {
5587 ot = dflag ? OT_QUAD : OT_WORD;
5588 } else {
5589 ot = dflag + OT_WORD;
5590 }
57fec1fe 5591 gen_op_mov_reg_T0(ot, R_EBP);
2c0262af
FB
5592 gen_pop_update(s);
5593 break;
5594 case 0x06: /* push es */
5595 case 0x0e: /* push cs */
5596 case 0x16: /* push ss */
5597 case 0x1e: /* push ds */
14ce26e7
FB
5598 if (CODE64(s))
5599 goto illegal_op;
2c0262af
FB
5600 gen_op_movl_T0_seg(b >> 3);
5601 gen_push_T0(s);
5602 break;
5603 case 0x1a0: /* push fs */
5604 case 0x1a8: /* push gs */
5605 gen_op_movl_T0_seg((b >> 3) & 7);
5606 gen_push_T0(s);
5607 break;
5608 case 0x07: /* pop es */
5609 case 0x17: /* pop ss */
5610 case 0x1f: /* pop ds */
14ce26e7
FB
5611 if (CODE64(s))
5612 goto illegal_op;
2c0262af
FB
5613 reg = b >> 3;
5614 gen_pop_T0(s);
5615 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5616 gen_pop_update(s);
5617 if (reg == R_SS) {
a2cc3b24
FB
5618 /* if reg == SS, inhibit interrupts/trace. */
5619 /* If several instructions disable interrupts, only the
5620 _first_ does it */
5621 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 5622 gen_helper_set_inhibit_irq(cpu_env);
2c0262af
FB
5623 s->tf = 0;
5624 }
5625 if (s->is_jmp) {
14ce26e7 5626 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5627 gen_eob(s);
5628 }
5629 break;
5630 case 0x1a1: /* pop fs */
5631 case 0x1a9: /* pop gs */
5632 gen_pop_T0(s);
5633 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5634 gen_pop_update(s);
5635 if (s->is_jmp) {
14ce26e7 5636 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5637 gen_eob(s);
5638 }
5639 break;
5640
5641 /**************************/
5642 /* mov */
5643 case 0x88:
5644 case 0x89: /* mov Gv, Ev */
5645 if ((b & 1) == 0)
5646 ot = OT_BYTE;
5647 else
14ce26e7 5648 ot = dflag + OT_WORD;
0af10c86 5649 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5650 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5651
2c0262af 5652 /* generate a generic store */
0af10c86 5653 gen_ldst_modrm(env, s, modrm, ot, reg, 1);
2c0262af
FB
5654 break;
5655 case 0xc6:
5656 case 0xc7: /* mov Ev, Iv */
5657 if ((b & 1) == 0)
5658 ot = OT_BYTE;
5659 else
14ce26e7 5660 ot = dflag + OT_WORD;
0af10c86 5661 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5662 mod = (modrm >> 6) & 3;
14ce26e7
FB
5663 if (mod != 3) {
5664 s->rip_offset = insn_const_size(ot);
0af10c86 5665 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 5666 }
0af10c86 5667 val = insn_get(env, s, ot);
2c0262af
FB
5668 gen_op_movl_T0_im(val);
5669 if (mod != 3)
57fec1fe 5670 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5671 else
57fec1fe 5672 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
2c0262af
FB
5673 break;
5674 case 0x8a:
5675 case 0x8b: /* mov Ev, Gv */
5676 if ((b & 1) == 0)
5677 ot = OT_BYTE;
5678 else
14ce26e7 5679 ot = OT_WORD + dflag;
0af10c86 5680 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5681 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5682
0af10c86 5683 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
57fec1fe 5684 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
5685 break;
5686 case 0x8e: /* mov seg, Gv */
0af10c86 5687 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5688 reg = (modrm >> 3) & 7;
5689 if (reg >= 6 || reg == R_CS)
5690 goto illegal_op;
0af10c86 5691 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
2c0262af
FB
5692 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5693 if (reg == R_SS) {
5694 /* if reg == SS, inhibit interrupts/trace */
a2cc3b24
FB
5695 /* If several instructions disable interrupts, only the
5696 _first_ does it */
5697 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 5698 gen_helper_set_inhibit_irq(cpu_env);
2c0262af
FB
5699 s->tf = 0;
5700 }
5701 if (s->is_jmp) {
14ce26e7 5702 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5703 gen_eob(s);
5704 }
5705 break;
5706 case 0x8c: /* mov Gv, seg */
0af10c86 5707 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5708 reg = (modrm >> 3) & 7;
5709 mod = (modrm >> 6) & 3;
5710 if (reg >= 6)
5711 goto illegal_op;
5712 gen_op_movl_T0_seg(reg);
14ce26e7
FB
5713 if (mod == 3)
5714 ot = OT_WORD + dflag;
5715 else
5716 ot = OT_WORD;
0af10c86 5717 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
5718 break;
5719
5720 case 0x1b6: /* movzbS Gv, Eb */
5721 case 0x1b7: /* movzwS Gv, Eb */
5722 case 0x1be: /* movsbS Gv, Eb */
5723 case 0x1bf: /* movswS Gv, Eb */
5724 {
5725 int d_ot;
5726 /* d_ot is the size of the destination */
5727 d_ot = dflag + OT_WORD;
5728 /* ot is the size of the source */
5729 ot = (b & 1) + OT_BYTE;
0af10c86 5730 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5731 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 5732 mod = (modrm >> 6) & 3;
14ce26e7 5733 rm = (modrm & 7) | REX_B(s);
3b46e624 5734
2c0262af 5735 if (mod == 3) {
57fec1fe 5736 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
5737 switch(ot | (b & 8)) {
5738 case OT_BYTE:
e108dd01 5739 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5740 break;
5741 case OT_BYTE | 8:
e108dd01 5742 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5743 break;
5744 case OT_WORD:
e108dd01 5745 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5746 break;
5747 default:
5748 case OT_WORD | 8:
e108dd01 5749 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5750 break;
5751 }
57fec1fe 5752 gen_op_mov_reg_T0(d_ot, reg);
2c0262af 5753 } else {
0af10c86 5754 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5755 if (b & 8) {
57fec1fe 5756 gen_op_lds_T0_A0(ot + s->mem_index);
2c0262af 5757 } else {
57fec1fe 5758 gen_op_ldu_T0_A0(ot + s->mem_index);
2c0262af 5759 }
57fec1fe 5760 gen_op_mov_reg_T0(d_ot, reg);
2c0262af
FB
5761 }
5762 }
5763 break;
5764
5765 case 0x8d: /* lea */
14ce26e7 5766 ot = dflag + OT_WORD;
0af10c86 5767 modrm = cpu_ldub_code(env, s->pc++);
3a1d9b8b
FB
5768 mod = (modrm >> 6) & 3;
5769 if (mod == 3)
5770 goto illegal_op;
14ce26e7 5771 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5772 /* we must ensure that no segment is added */
5773 s->override = -1;
5774 val = s->addseg;
5775 s->addseg = 0;
0af10c86 5776 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5777 s->addseg = val;
57fec1fe 5778 gen_op_mov_reg_A0(ot - OT_WORD, reg);
2c0262af 5779 break;
3b46e624 5780
2c0262af
FB
5781 case 0xa0: /* mov EAX, Ov */
5782 case 0xa1:
5783 case 0xa2: /* mov Ov, EAX */
5784 case 0xa3:
2c0262af 5785 {
14ce26e7
FB
5786 target_ulong offset_addr;
5787
5788 if ((b & 1) == 0)
5789 ot = OT_BYTE;
5790 else
5791 ot = dflag + OT_WORD;
5792#ifdef TARGET_X86_64
8f091a59 5793 if (s->aflag == 2) {
0af10c86 5794 offset_addr = cpu_ldq_code(env, s->pc);
14ce26e7 5795 s->pc += 8;
57fec1fe 5796 gen_op_movq_A0_im(offset_addr);
5fafdf24 5797 } else
14ce26e7
FB
5798#endif
5799 {
5800 if (s->aflag) {
0af10c86 5801 offset_addr = insn_get(env, s, OT_LONG);
14ce26e7 5802 } else {
0af10c86 5803 offset_addr = insn_get(env, s, OT_WORD);
14ce26e7
FB
5804 }
5805 gen_op_movl_A0_im(offset_addr);
5806 }
664e0f19 5807 gen_add_A0_ds_seg(s);
14ce26e7 5808 if ((b & 2) == 0) {
57fec1fe
FB
5809 gen_op_ld_T0_A0(ot + s->mem_index);
5810 gen_op_mov_reg_T0(ot, R_EAX);
14ce26e7 5811 } else {
57fec1fe
FB
5812 gen_op_mov_TN_reg(ot, 0, R_EAX);
5813 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af
FB
5814 }
5815 }
2c0262af
FB
5816 break;
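    /* xlat: A0 = rBX plus zero-extended AL, truncated to the address
       size; gen_add_A0_ds_seg() applies DS (or a segment override), and
       AL is then reloaded from the byte found there. */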
5817 case 0xd7: /* xlat */
14ce26e7 5818#ifdef TARGET_X86_64
8f091a59 5819 if (s->aflag == 2) {
57fec1fe 5820 gen_op_movq_A0_reg(R_EBX);
bbf662ee
FB
5821 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5822 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5823 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5fafdf24 5824 } else
14ce26e7
FB
5825#endif
5826 {
57fec1fe 5827 gen_op_movl_A0_reg(R_EBX);
bbf662ee
FB
5828 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5829 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5830 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
14ce26e7
FB
5831 if (s->aflag == 0)
5832 gen_op_andl_A0_ffff();
bbf662ee
FB
5833 else
5834 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
14ce26e7 5835 }
664e0f19 5836 gen_add_A0_ds_seg(s);
57fec1fe
FB
5837 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5838 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
2c0262af
FB
5839 break;
5840 case 0xb0 ... 0xb7: /* mov R, Ib */
0af10c86 5841 val = insn_get(env, s, OT_BYTE);
2c0262af 5842 gen_op_movl_T0_im(val);
57fec1fe 5843 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
2c0262af
FB
5844 break;
5845 case 0xb8 ... 0xbf: /* mov R, Iv */
14ce26e7
FB
5846#ifdef TARGET_X86_64
5847 if (dflag == 2) {
5848 uint64_t tmp;
5849 /* 64 bit case */
0af10c86 5850 tmp = cpu_ldq_code(env, s->pc);
14ce26e7
FB
5851 s->pc += 8;
5852 reg = (b & 7) | REX_B(s);
5853 gen_movtl_T0_im(tmp);
57fec1fe 5854 gen_op_mov_reg_T0(OT_QUAD, reg);
5fafdf24 5855 } else
14ce26e7
FB
5856#endif
5857 {
5858 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 5859 val = insn_get(env, s, ot);
14ce26e7
FB
5860 reg = (b & 7) | REX_B(s);
5861 gen_op_movl_T0_im(val);
57fec1fe 5862 gen_op_mov_reg_T0(ot, reg);
14ce26e7 5863 }
2c0262af
FB
5864 break;
5865
5866 case 0x91 ... 0x97: /* xchg R, EAX */
7418027e 5867 do_xchg_reg_eax:
14ce26e7
FB
5868 ot = dflag + OT_WORD;
5869 reg = (b & 7) | REX_B(s);
2c0262af
FB
5870 rm = R_EAX;
5871 goto do_xchg_reg;
5872 case 0x86:
5873 case 0x87: /* xchg Ev, Gv */
5874 if ((b & 1) == 0)
5875 ot = OT_BYTE;
5876 else
14ce26e7 5877 ot = dflag + OT_WORD;
0af10c86 5878 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5879 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5880 mod = (modrm >> 6) & 3;
5881 if (mod == 3) {
14ce26e7 5882 rm = (modrm & 7) | REX_B(s);
2c0262af 5883 do_xchg_reg:
57fec1fe
FB
5884 gen_op_mov_TN_reg(ot, 0, reg);
5885 gen_op_mov_TN_reg(ot, 1, rm);
5886 gen_op_mov_reg_T0(ot, rm);
5887 gen_op_mov_reg_T1(ot, reg);
2c0262af 5888 } else {
0af10c86 5889 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 5890 gen_op_mov_TN_reg(ot, 0, reg);
2c0262af
FB
5891 /* for xchg, lock is implicit */
5892 if (!(prefixes & PREFIX_LOCK))
a7812ae4 5893 gen_helper_lock();
57fec1fe
FB
5894 gen_op_ld_T1_A0(ot + s->mem_index);
5895 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5896 if (!(prefixes & PREFIX_LOCK))
a7812ae4 5897 gen_helper_unlock();
57fec1fe 5898 gen_op_mov_reg_T1(ot, reg);
2c0262af
FB
5899 }
5900 break;
5901 case 0xc4: /* les Gv */
701ed211 5902 /* In CODE64 this is VEX3; see above. */
2c0262af
FB
5903 op = R_ES;
5904 goto do_lxx;
5905 case 0xc5: /* lds Gv */
701ed211 5906 /* In CODE64 this is VEX2; see above. */
2c0262af
FB
5907 op = R_DS;
5908 goto do_lxx;
5909 case 0x1b2: /* lss Gv */
5910 op = R_SS;
5911 goto do_lxx;
5912 case 0x1b4: /* lfs Gv */
5913 op = R_FS;
5914 goto do_lxx;
5915 case 0x1b5: /* lgs Gv */
5916 op = R_GS;
5917 do_lxx:
5918 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 5919 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5920 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5921 mod = (modrm >> 6) & 3;
5922 if (mod == 3)
5923 goto illegal_op;
0af10c86 5924 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 5925 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5926 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
2c0262af 5927 /* load the segment first to handle exceptions properly */
57fec1fe 5928 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5929 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5930 /* then put the data */
57fec1fe 5931 gen_op_mov_reg_T1(ot, reg);
2c0262af 5932 if (s->is_jmp) {
14ce26e7 5933 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5934 gen_eob(s);
5935 }
5936 break;
3b46e624 5937
2c0262af
FB
5938 /************************/
5939 /* shifts */
5940 case 0xc0:
5941 case 0xc1:
5942 /* shift Ev,Ib */
5943 shift = 2;
5944 grp2:
5945 {
5946 if ((b & 1) == 0)
5947 ot = OT_BYTE;
5948 else
14ce26e7 5949 ot = dflag + OT_WORD;
3b46e624 5950
0af10c86 5951 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5952 mod = (modrm >> 6) & 3;
2c0262af 5953 op = (modrm >> 3) & 7;
3b46e624 5954
2c0262af 5955 if (mod != 3) {
14ce26e7
FB
5956 if (shift == 2) {
5957 s->rip_offset = 1;
5958 }
0af10c86 5959 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
5960 opreg = OR_TMP0;
5961 } else {
14ce26e7 5962 opreg = (modrm & 7) | REX_B(s);
2c0262af
FB
5963 }
5964
5965 /* simpler op */
5966 if (shift == 0) {
5967 gen_shift(s, op, ot, opreg, OR_ECX);
5968 } else {
5969 if (shift == 2) {
0af10c86 5970 shift = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5971 }
5972 gen_shifti(s, op, ot, opreg, shift);
5973 }
5974 }
5975 break;
5976 case 0xd0:
5977 case 0xd1:
5978 /* shift Ev,1 */
5979 shift = 1;
5980 goto grp2;
5981 case 0xd2:
5982 case 0xd3:
5983 /* shift Ev,cl */
5984 shift = 0;
5985 goto grp2;
5986
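    /* shld/shrd share one path: 'op' selects the direction (0 = shld,
       1 = shrd) and 'shift' selects the count source, either an
       immediate byte or CL (cpu_regs[R_ECX]). */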
5987 case 0x1a4: /* shld imm */
5988 op = 0;
5989 shift = 1;
5990 goto do_shiftd;
5991 case 0x1a5: /* shld cl */
5992 op = 0;
5993 shift = 0;
5994 goto do_shiftd;
5995 case 0x1ac: /* shrd imm */
5996 op = 1;
5997 shift = 1;
5998 goto do_shiftd;
5999 case 0x1ad: /* shrd cl */
6000 op = 1;
6001 shift = 0;
6002 do_shiftd:
14ce26e7 6003 ot = dflag + OT_WORD;
0af10c86 6004 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 6005 mod = (modrm >> 6) & 3;
14ce26e7
FB
6006 rm = (modrm & 7) | REX_B(s);
6007 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 6008 if (mod != 3) {
0af10c86 6009 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
b6abf97d 6010 opreg = OR_TMP0;
2c0262af 6011 } else {
b6abf97d 6012 opreg = rm;
2c0262af 6013 }
57fec1fe 6014 gen_op_mov_TN_reg(ot, 1, reg);
3b46e624 6015
2c0262af 6016 if (shift) {
3b9d3cf1
PB
6017 TCGv imm = tcg_const_tl(cpu_ldub_code(env, s->pc++));
6018 gen_shiftd_rm_T1(s, ot, opreg, op, imm);
6019 tcg_temp_free(imm);
2c0262af 6020 } else {
3b9d3cf1 6021 gen_shiftd_rm_T1(s, ot, opreg, op, cpu_regs[R_ECX]);
2c0262af
FB
6022 }
6023 break;
6024
6025 /************************/
6026 /* floats */
5fafdf24 6027 case 0xd8 ... 0xdf:
7eee2a50
FB
6028 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6029 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6030 /* XXX: what to do if illegal op ? */
6031 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6032 break;
6033 }
0af10c86 6034 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
6035 mod = (modrm >> 6) & 3;
6036 rm = modrm & 7;
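    /* op packs the low three opcode bits (0xd8..0xdf) with the reg
       field of the modrm byte, giving a 6-bit index that is used below
       to dispatch both the memory and the register forms of the x87
       instructions. */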
6037 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
2c0262af
FB
6038 if (mod != 3) {
6039 /* memory op */
0af10c86 6040 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
6041 switch(op) {
6042 case 0x00 ... 0x07: /* fxxxs */
6043 case 0x10 ... 0x17: /* fixxxl */
6044 case 0x20 ... 0x27: /* fxxxl */
6045 case 0x30 ... 0x37: /* fixxx */
6046 {
6047 int op1;
6048 op1 = op & 7;
6049
6050 switch(op >> 4) {
6051 case 0:
ba7cd150 6052 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6053 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6054 gen_helper_flds_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6055 break;
6056 case 1:
ba7cd150 6057 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6058 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6059 gen_helper_fildl_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6060 break;
6061 case 2:
b6abf97d 6062 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6063 (s->mem_index >> 2) - 1);
d3eb5eae 6064 gen_helper_fldl_FT0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6065 break;
6066 case 3:
6067 default:
ba7cd150 6068 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6069 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6070 gen_helper_fildl_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6071 break;
6072 }
3b46e624 6073
a7812ae4 6074 gen_helper_fp_arith_ST0_FT0(op1);
2c0262af
FB
6075 if (op1 == 3) {
6076 /* fcomp needs pop */
d3eb5eae 6077 gen_helper_fpop(cpu_env);
2c0262af
FB
6078 }
6079 }
6080 break;
6081 case 0x08: /* flds */
6082 case 0x0a: /* fsts */
6083 case 0x0b: /* fstps */
465e9838
FB
6084 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6085 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6086 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
2c0262af
FB
6087 switch(op & 7) {
6088 case 0:
6089 switch(op >> 4) {
6090 case 0:
ba7cd150 6091 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6092 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6093 gen_helper_flds_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6094 break;
6095 case 1:
ba7cd150 6096 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6097 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6098 gen_helper_fildl_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6099 break;
6100 case 2:
b6abf97d 6101 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6102 (s->mem_index >> 2) - 1);
d3eb5eae 6103 gen_helper_fldl_ST0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6104 break;
6105 case 3:
6106 default:
ba7cd150 6107 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6108 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6109 gen_helper_fildl_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6110 break;
6111 }
6112 break;
465e9838 6113 case 1:
19e6c4b8 6114 /* XXX: the corresponding CPUID bit (SSE3, for the fisttp variants) must be tested! */
465e9838
FB
6115 switch(op >> 4) {
6116 case 1:
d3eb5eae 6117 gen_helper_fisttl_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6118 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6119 gen_op_st_T0_A0(OT_LONG + s->mem_index);
465e9838
FB
6120 break;
6121 case 2:
d3eb5eae 6122 gen_helper_fisttll_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6123 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6124 (s->mem_index >> 2) - 1);
465e9838
FB
6125 break;
6126 case 3:
6127 default:
d3eb5eae 6128 gen_helper_fistt_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6129 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6130 gen_op_st_T0_A0(OT_WORD + s->mem_index);
19e6c4b8 6131 break;
465e9838 6132 }
d3eb5eae 6133 gen_helper_fpop(cpu_env);
465e9838 6134 break;
2c0262af
FB
6135 default:
6136 switch(op >> 4) {
6137 case 0:
d3eb5eae 6138 gen_helper_fsts_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6139 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6140 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2c0262af
FB
6141 break;
6142 case 1:
d3eb5eae 6143 gen_helper_fistl_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6144 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6145 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2c0262af
FB
6146 break;
6147 case 2:
d3eb5eae 6148 gen_helper_fstl_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6149 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6150 (s->mem_index >> 2) - 1);
2c0262af
FB
6151 break;
6152 case 3:
6153 default:
d3eb5eae 6154 gen_helper_fist_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6155 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6156 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6157 break;
6158 }
6159 if ((op & 7) == 3)
d3eb5eae 6160 gen_helper_fpop(cpu_env);
2c0262af
FB
6161 break;
6162 }
6163 break;
6164 case 0x0c: /* fldenv mem */
773cdfcc 6165 gen_update_cc_op(s);
19e6c4b8 6166 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6167 gen_helper_fldenv(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6168 break;
6169 case 0x0d: /* fldcw mem */
19e6c4b8 6170 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6171 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6172 gen_helper_fldcw(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6173 break;
6174 case 0x0e: /* fnstenv mem */
773cdfcc 6175 gen_update_cc_op(s);
19e6c4b8 6176 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6177 gen_helper_fstenv(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6178 break;
6179 case 0x0f: /* fnstcw mem */
d3eb5eae 6180 gen_helper_fnstcw(cpu_tmp2_i32, cpu_env);
b6abf97d 6181 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6182 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6183 break;
6184 case 0x1d: /* fldt mem */
773cdfcc 6185 gen_update_cc_op(s);
19e6c4b8 6186 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6187 gen_helper_fldt_ST0(cpu_env, cpu_A0);
2c0262af
FB
6188 break;
6189 case 0x1f: /* fstpt mem */
773cdfcc 6190 gen_update_cc_op(s);
19e6c4b8 6191 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
6192 gen_helper_fstt_ST0(cpu_env, cpu_A0);
6193 gen_helper_fpop(cpu_env);
2c0262af
FB
6194 break;
6195 case 0x2c: /* frstor mem */
773cdfcc 6196 gen_update_cc_op(s);
19e6c4b8 6197 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6198 gen_helper_frstor(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6199 break;
6200 case 0x2e: /* fnsave mem */
773cdfcc 6201 gen_update_cc_op(s);
19e6c4b8 6202 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6203 gen_helper_fsave(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6204 break;
6205 case 0x2f: /* fnstsw mem */
d3eb5eae 6206 gen_helper_fnstsw(cpu_tmp2_i32, cpu_env);
b6abf97d 6207 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6208 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6209 break;
6210 case 0x3c: /* fbld */
773cdfcc 6211 gen_update_cc_op(s);
19e6c4b8 6212 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6213 gen_helper_fbld_ST0(cpu_env, cpu_A0);
2c0262af
FB
6214 break;
6215 case 0x3e: /* fbstp */
773cdfcc 6216 gen_update_cc_op(s);
19e6c4b8 6217 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
6218 gen_helper_fbst_ST0(cpu_env, cpu_A0);
6219 gen_helper_fpop(cpu_env);
2c0262af
FB
6220 break;
6221 case 0x3d: /* fildll */
b6abf97d 6222 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6223 (s->mem_index >> 2) - 1);
d3eb5eae 6224 gen_helper_fildll_ST0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6225 break;
6226 case 0x3f: /* fistpll */
d3eb5eae 6227 gen_helper_fistll_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6228 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6229 (s->mem_index >> 2) - 1);
d3eb5eae 6230 gen_helper_fpop(cpu_env);
2c0262af
FB
6231 break;
6232 default:
6233 goto illegal_op;
6234 }
6235 } else {
6236 /* register float ops */
6237 opreg = rm;
6238
6239 switch(op) {
6240 case 0x08: /* fld sti */
d3eb5eae
BS
6241 gen_helper_fpush(cpu_env);
6242 gen_helper_fmov_ST0_STN(cpu_env,
6243 tcg_const_i32((opreg + 1) & 7));
2c0262af
FB
6244 break;
6245 case 0x09: /* fxchg sti */
c169c906
FB
6246 case 0x29: /* fxchg4 sti, undocumented op */
6247 case 0x39: /* fxchg7 sti, undocumented op */
d3eb5eae 6248 gen_helper_fxchg_ST0_STN(cpu_env, tcg_const_i32(opreg));
2c0262af
FB
6249 break;
6250 case 0x0a: /* grp d9/2 */
6251 switch(rm) {
6252 case 0: /* fnop */
023fe10d 6253 /* check exceptions (FreeBSD FPU probe) */
773cdfcc 6254 gen_update_cc_op(s);
14ce26e7 6255 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6256 gen_helper_fwait(cpu_env);
2c0262af
FB
6257 break;
6258 default:
6259 goto illegal_op;
6260 }
6261 break;
6262 case 0x0c: /* grp d9/4 */
6263 switch(rm) {
6264 case 0: /* fchs */
d3eb5eae 6265 gen_helper_fchs_ST0(cpu_env);
2c0262af
FB
6266 break;
6267 case 1: /* fabs */
d3eb5eae 6268 gen_helper_fabs_ST0(cpu_env);
2c0262af
FB
6269 break;
6270 case 4: /* ftst */
d3eb5eae
BS
6271 gen_helper_fldz_FT0(cpu_env);
6272 gen_helper_fcom_ST0_FT0(cpu_env);
2c0262af
FB
6273 break;
6274 case 5: /* fxam */
d3eb5eae 6275 gen_helper_fxam_ST0(cpu_env);
2c0262af
FB
6276 break;
6277 default:
6278 goto illegal_op;
6279 }
6280 break;
6281 case 0x0d: /* grp d9/5 */
6282 {
6283 switch(rm) {
6284 case 0:
d3eb5eae
BS
6285 gen_helper_fpush(cpu_env);
6286 gen_helper_fld1_ST0(cpu_env);
2c0262af
FB
6287 break;
6288 case 1:
d3eb5eae
BS
6289 gen_helper_fpush(cpu_env);
6290 gen_helper_fldl2t_ST0(cpu_env);
2c0262af
FB
6291 break;
6292 case 2:
d3eb5eae
BS
6293 gen_helper_fpush(cpu_env);
6294 gen_helper_fldl2e_ST0(cpu_env);
2c0262af
FB
6295 break;
6296 case 3:
d3eb5eae
BS
6297 gen_helper_fpush(cpu_env);
6298 gen_helper_fldpi_ST0(cpu_env);
2c0262af
FB
6299 break;
6300 case 4:
d3eb5eae
BS
6301 gen_helper_fpush(cpu_env);
6302 gen_helper_fldlg2_ST0(cpu_env);
2c0262af
FB
6303 break;
6304 case 5:
d3eb5eae
BS
6305 gen_helper_fpush(cpu_env);
6306 gen_helper_fldln2_ST0(cpu_env);
2c0262af
FB
6307 break;
6308 case 6:
d3eb5eae
BS
6309 gen_helper_fpush(cpu_env);
6310 gen_helper_fldz_ST0(cpu_env);
2c0262af
FB
6311 break;
6312 default:
6313 goto illegal_op;
6314 }
6315 }
6316 break;
6317 case 0x0e: /* grp d9/6 */
6318 switch(rm) {
6319 case 0: /* f2xm1 */
d3eb5eae 6320 gen_helper_f2xm1(cpu_env);
2c0262af
FB
6321 break;
6322 case 1: /* fyl2x */
d3eb5eae 6323 gen_helper_fyl2x(cpu_env);
2c0262af
FB
6324 break;
6325 case 2: /* fptan */
d3eb5eae 6326 gen_helper_fptan(cpu_env);
2c0262af
FB
6327 break;
6328 case 3: /* fpatan */
d3eb5eae 6329 gen_helper_fpatan(cpu_env);
2c0262af
FB
6330 break;
6331 case 4: /* fxtract */
d3eb5eae 6332 gen_helper_fxtract(cpu_env);
2c0262af
FB
6333 break;
6334 case 5: /* fprem1 */
d3eb5eae 6335 gen_helper_fprem1(cpu_env);
2c0262af
FB
6336 break;
6337 case 6: /* fdecstp */
d3eb5eae 6338 gen_helper_fdecstp(cpu_env);
2c0262af
FB
6339 break;
6340 default:
6341 case 7: /* fincstp */
d3eb5eae 6342 gen_helper_fincstp(cpu_env);
2c0262af
FB
6343 break;
6344 }
6345 break;
6346 case 0x0f: /* grp d9/7 */
6347 switch(rm) {
6348 case 0: /* fprem */
d3eb5eae 6349 gen_helper_fprem(cpu_env);
2c0262af
FB
6350 break;
6351 case 1: /* fyl2xp1 */
d3eb5eae 6352 gen_helper_fyl2xp1(cpu_env);
2c0262af
FB
6353 break;
6354 case 2: /* fsqrt */
d3eb5eae 6355 gen_helper_fsqrt(cpu_env);
2c0262af
FB
6356 break;
6357 case 3: /* fsincos */
d3eb5eae 6358 gen_helper_fsincos(cpu_env);
2c0262af
FB
6359 break;
6360 case 5: /* fscale */
d3eb5eae 6361 gen_helper_fscale(cpu_env);
2c0262af
FB
6362 break;
6363 case 4: /* frndint */
d3eb5eae 6364 gen_helper_frndint(cpu_env);
2c0262af
FB
6365 break;
6366 case 6: /* fsin */
d3eb5eae 6367 gen_helper_fsin(cpu_env);
2c0262af
FB
6368 break;
6369 default:
6370 case 7: /* fcos */
d3eb5eae 6371 gen_helper_fcos(cpu_env);
2c0262af
FB
6372 break;
6373 }
6374 break;
6375 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6376 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6377 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6378 {
6379 int op1;
3b46e624 6380
2c0262af
FB
6381 op1 = op & 7;
6382 if (op >= 0x20) {
a7812ae4 6383 gen_helper_fp_arith_STN_ST0(op1, opreg);
2c0262af 6384 if (op >= 0x30)
d3eb5eae 6385 gen_helper_fpop(cpu_env);
2c0262af 6386 } else {
d3eb5eae 6387 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
a7812ae4 6388 gen_helper_fp_arith_ST0_FT0(op1);
2c0262af
FB
6389 }
6390 }
6391 break;
6392 case 0x02: /* fcom */
c169c906 6393 case 0x22: /* fcom2, undocumented op */
d3eb5eae
BS
6394 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6395 gen_helper_fcom_ST0_FT0(cpu_env);
2c0262af
FB
6396 break;
6397 case 0x03: /* fcomp */
c169c906
FB
6398 case 0x23: /* fcomp3, undocumented op */
6399 case 0x32: /* fcomp5, undocumented op */
d3eb5eae
BS
6400 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6401 gen_helper_fcom_ST0_FT0(cpu_env);
6402 gen_helper_fpop(cpu_env);
2c0262af
FB
6403 break;
6404 case 0x15: /* da/5 */
6405 switch(rm) {
6406 case 1: /* fucompp */
d3eb5eae
BS
6407 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(1));
6408 gen_helper_fucom_ST0_FT0(cpu_env);
6409 gen_helper_fpop(cpu_env);
6410 gen_helper_fpop(cpu_env);
2c0262af
FB
6411 break;
6412 default:
6413 goto illegal_op;
6414 }
6415 break;
6416 case 0x1c:
6417 switch(rm) {
6418 case 0: /* feni (287 only, just do nop here) */
6419 break;
6420 case 1: /* fdisi (287 only, just do nop here) */
6421 break;
6422 case 2: /* fclex */
d3eb5eae 6423 gen_helper_fclex(cpu_env);
2c0262af
FB
6424 break;
6425 case 3: /* fninit */
d3eb5eae 6426 gen_helper_fninit(cpu_env);
2c0262af
FB
6427 break;
6428 case 4: /* fsetpm (287 only, just do nop here) */
6429 break;
6430 default:
6431 goto illegal_op;
6432 }
6433 break;
6434 case 0x1d: /* fucomi */
773cdfcc 6435 gen_update_cc_op(s);
d3eb5eae
BS
6436 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6437 gen_helper_fucomi_ST0_FT0(cpu_env);
3ca51d07 6438 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6439 break;
6440 case 0x1e: /* fcomi */
773cdfcc 6441 gen_update_cc_op(s);
d3eb5eae
BS
6442 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6443 gen_helper_fcomi_ST0_FT0(cpu_env);
3ca51d07 6444 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 6445 break;
658c8bda 6446 case 0x28: /* ffree sti */
d3eb5eae 6447 gen_helper_ffree_STN(cpu_env, tcg_const_i32(opreg));
5fafdf24 6448 break;
2c0262af 6449 case 0x2a: /* fst sti */
d3eb5eae 6450 gen_helper_fmov_STN_ST0(cpu_env, tcg_const_i32(opreg));
2c0262af
FB
6451 break;
6452 case 0x2b: /* fstp sti */
c169c906
FB
6453 case 0x0b: /* fstp1 sti, undocumented op */
6454 case 0x3a: /* fstp8 sti, undocumented op */
6455 case 0x3b: /* fstp9 sti, undocumented op */
d3eb5eae
BS
6456 gen_helper_fmov_STN_ST0(cpu_env, tcg_const_i32(opreg));
6457 gen_helper_fpop(cpu_env);
2c0262af
FB
6458 break;
6459 case 0x2c: /* fucom st(i) */
d3eb5eae
BS
6460 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6461 gen_helper_fucom_ST0_FT0(cpu_env);
2c0262af
FB
6462 break;
6463 case 0x2d: /* fucomp st(i) */
d3eb5eae
BS
6464 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6465 gen_helper_fucom_ST0_FT0(cpu_env);
6466 gen_helper_fpop(cpu_env);
2c0262af
FB
6467 break;
6468 case 0x33: /* de/3 */
6469 switch(rm) {
6470 case 1: /* fcompp */
d3eb5eae
BS
6471 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(1));
6472 gen_helper_fcom_ST0_FT0(cpu_env);
6473 gen_helper_fpop(cpu_env);
6474 gen_helper_fpop(cpu_env);
2c0262af
FB
6475 break;
6476 default:
6477 goto illegal_op;
6478 }
6479 break;
c169c906 6480 case 0x38: /* ffreep sti, undocumented op */
d3eb5eae
BS
6481 gen_helper_ffree_STN(cpu_env, tcg_const_i32(opreg));
6482 gen_helper_fpop(cpu_env);
c169c906 6483 break;
2c0262af
FB
6484 case 0x3c: /* df/4 */
6485 switch(rm) {
6486 case 0:
d3eb5eae 6487 gen_helper_fnstsw(cpu_tmp2_i32, cpu_env);
b6abf97d 6488 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6489 gen_op_mov_reg_T0(OT_WORD, R_EAX);
2c0262af
FB
6490 break;
6491 default:
6492 goto illegal_op;
6493 }
6494 break;
6495 case 0x3d: /* fucomip */
773cdfcc 6496 gen_update_cc_op(s);
d3eb5eae
BS
6497 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6498 gen_helper_fucomi_ST0_FT0(cpu_env);
6499 gen_helper_fpop(cpu_env);
3ca51d07 6500 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6501 break;
6502 case 0x3e: /* fcomip */
773cdfcc 6503 gen_update_cc_op(s);
d3eb5eae
BS
6504 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6505 gen_helper_fcomi_ST0_FT0(cpu_env);
6506 gen_helper_fpop(cpu_env);
3ca51d07 6507 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 6508 break;
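    /* fcmovcc is a conditional branch around a plain ST0 <- ST(i)
       move: fcmov_cc maps the opcode to one of the B/Z/BE/P tests, the
       low bit of op1 encodes negation, and gen_jcc1_noeob jumps past
       the fmov whenever the move condition does not hold. */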
a2cc3b24
FB
6509 case 0x10 ... 0x13: /* fcmovxx */
6510 case 0x18 ... 0x1b:
6511 {
19e6c4b8 6512 int op1, l1;
d70040bc 6513 static const uint8_t fcmov_cc[8] = {
a2cc3b24
FB
6514 (JCC_B << 1),
6515 (JCC_Z << 1),
6516 (JCC_BE << 1),
6517 (JCC_P << 1),
6518 };
1e4840bf 6519 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
19e6c4b8 6520 l1 = gen_new_label();
dc259201 6521 gen_jcc1_noeob(s, op1, l1);
d3eb5eae 6522 gen_helper_fmov_ST0_STN(cpu_env, tcg_const_i32(opreg));
19e6c4b8 6523 gen_set_label(l1);
a2cc3b24
FB
6524 }
6525 break;
2c0262af
FB
6526 default:
6527 goto illegal_op;
6528 }
6529 }
6530 break;
6531 /************************/
6532 /* string ops */
6533
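    /* Each string instruction has a REP form handled by the gen_repz_*
       helpers, which emit the rCX decrement and loop; only scas and
       cmps distinguish REPZ from REPNZ, via the last argument
       (1 = repnz). */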
6534 case 0xa4: /* movsS */
6535 case 0xa5:
6536 if ((b & 1) == 0)
6537 ot = OT_BYTE;
6538 else
14ce26e7 6539 ot = dflag + OT_WORD;
2c0262af
FB
6540
6541 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6542 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6543 } else {
6544 gen_movs(s, ot);
6545 }
6546 break;
3b46e624 6547
2c0262af
FB
6548 case 0xaa: /* stosS */
6549 case 0xab:
6550 if ((b & 1) == 0)
6551 ot = OT_BYTE;
6552 else
14ce26e7 6553 ot = dflag + OT_WORD;
2c0262af
FB
6554
6555 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6556 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6557 } else {
6558 gen_stos(s, ot);
6559 }
6560 break;
6561 case 0xac: /* lodsS */
6562 case 0xad:
6563 if ((b & 1) == 0)
6564 ot = OT_BYTE;
6565 else
14ce26e7 6566 ot = dflag + OT_WORD;
2c0262af
FB
6567 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6568 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6569 } else {
6570 gen_lods(s, ot);
6571 }
6572 break;
6573 case 0xae: /* scasS */
6574 case 0xaf:
6575 if ((b & 1) == 0)
6576 ot = OT_BYTE;
6577 else
14ce26e7 6578 ot = dflag + OT_WORD;
2c0262af
FB
6579 if (prefixes & PREFIX_REPNZ) {
6580 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6581 } else if (prefixes & PREFIX_REPZ) {
6582 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6583 } else {
6584 gen_scas(s, ot);
2c0262af
FB
6585 }
6586 break;
6587
6588 case 0xa6: /* cmpsS */
6589 case 0xa7:
6590 if ((b & 1) == 0)
6591 ot = OT_BYTE;
6592 else
14ce26e7 6593 ot = dflag + OT_WORD;
2c0262af
FB
6594 if (prefixes & PREFIX_REPNZ) {
6595 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6596 } else if (prefixes & PREFIX_REPZ) {
6597 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6598 } else {
6599 gen_cmps(s, ot);
2c0262af
FB
6600 }
6601 break;
6602 case 0x6c: /* insS */
6603 case 0x6d:
f115e911
FB
6604 if ((b & 1) == 0)
6605 ot = OT_BYTE;
6606 else
6607 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6608 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
0573fbfc 6609 gen_op_andl_T0_ffff();
b8b6a50b
FB
6610 gen_check_io(s, ot, pc_start - s->cs_base,
6611 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
f115e911
FB
6612 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6613 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
2c0262af 6614 } else {
f115e911 6615 gen_ins(s, ot);
2e70f6ef
PB
6616 if (use_icount) {
6617 gen_jmp(s, s->pc - s->cs_base);
6618 }
2c0262af
FB
6619 }
6620 break;
6621 case 0x6e: /* outsS */
6622 case 0x6f:
f115e911
FB
6623 if ((b & 1) == 0)
6624 ot = OT_BYTE;
6625 else
6626 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6627 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
0573fbfc 6628 gen_op_andl_T0_ffff();
b8b6a50b
FB
6629 gen_check_io(s, ot, pc_start - s->cs_base,
6630 svm_is_rep(prefixes) | 4);
f115e911
FB
6631 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6632 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
2c0262af 6633 } else {
f115e911 6634 gen_outs(s, ot);
2e70f6ef
PB
6635 if (use_icount) {
6636 gen_jmp(s, s->pc - s->cs_base);
6637 }
2c0262af
FB
6638 }
6639 break;
6640
6641 /************************/
6642 /* port I/O */
0573fbfc 6643
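    /* When icount is in use, the in/out helpers are bracketed by
       gen_io_start()/gen_io_end() and followed by a jump that ends the
       translation block, presumably so the I/O access is attributed to
       an exact instruction count. */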
2c0262af
FB
6644 case 0xe4:
6645 case 0xe5:
f115e911
FB
6646 if ((b & 1) == 0)
6647 ot = OT_BYTE;
6648 else
6649 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 6650 val = cpu_ldub_code(env, s->pc++);
f115e911 6651 gen_op_movl_T0_im(val);
b8b6a50b
FB
6652 gen_check_io(s, ot, pc_start - s->cs_base,
6653 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
2e70f6ef
PB
6654 if (use_icount)
6655 gen_io_start();
b6abf97d 6656 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 6657 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
57fec1fe 6658 gen_op_mov_reg_T1(ot, R_EAX);
2e70f6ef
PB
6659 if (use_icount) {
6660 gen_io_end();
6661 gen_jmp(s, s->pc - s->cs_base);
6662 }
2c0262af
FB
6663 break;
6664 case 0xe6:
6665 case 0xe7:
f115e911
FB
6666 if ((b & 1) == 0)
6667 ot = OT_BYTE;
6668 else
6669 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 6670 val = cpu_ldub_code(env, s->pc++);
f115e911 6671 gen_op_movl_T0_im(val);
b8b6a50b
FB
6672 gen_check_io(s, ot, pc_start - s->cs_base,
6673 svm_is_rep(prefixes));
57fec1fe 6674 gen_op_mov_TN_reg(ot, 1, R_EAX);
b8b6a50b 6675
2e70f6ef
PB
6676 if (use_icount)
6677 gen_io_start();
b6abf97d 6678 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
b6abf97d 6679 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
a7812ae4 6680 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
2e70f6ef
PB
6681 if (use_icount) {
6682 gen_io_end();
6683 gen_jmp(s, s->pc - s->cs_base);
6684 }
2c0262af
FB
6685 break;
6686 case 0xec:
6687 case 0xed:
f115e911
FB
6688 if ((b & 1) == 0)
6689 ot = OT_BYTE;
6690 else
6691 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6692 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4f31916f 6693 gen_op_andl_T0_ffff();
b8b6a50b
FB
6694 gen_check_io(s, ot, pc_start - s->cs_base,
6695 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
2e70f6ef
PB
6696 if (use_icount)
6697 gen_io_start();
b6abf97d 6698 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 6699 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
57fec1fe 6700 gen_op_mov_reg_T1(ot, R_EAX);
2e70f6ef
PB
6701 if (use_icount) {
6702 gen_io_end();
6703 gen_jmp(s, s->pc - s->cs_base);
6704 }
2c0262af
FB
6705 break;
6706 case 0xee:
6707 case 0xef:
f115e911
FB
6708 if ((b & 1) == 0)
6709 ot = OT_BYTE;
6710 else
6711 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6712 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4f31916f 6713 gen_op_andl_T0_ffff();
b8b6a50b
FB
6714 gen_check_io(s, ot, pc_start - s->cs_base,
6715 svm_is_rep(prefixes));
57fec1fe 6716 gen_op_mov_TN_reg(ot, 1, R_EAX);
b8b6a50b 6717
2e70f6ef
PB
6718 if (use_icount)
6719 gen_io_start();
b6abf97d 6720 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
b6abf97d 6721 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
a7812ae4 6722 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
2e70f6ef
PB
6723 if (use_icount) {
6724 gen_io_end();
6725 gen_jmp(s, s->pc - s->cs_base);
6726 }
2c0262af
FB
6727 break;
6728
6729 /************************/
6730 /* control */
6731 case 0xc2: /* ret im */
0af10c86 6732 val = cpu_ldsw_code(env, s->pc);
2c0262af
FB
6733 s->pc += 2;
6734 gen_pop_T0(s);
8f091a59
FB
6735 if (CODE64(s) && s->dflag)
6736 s->dflag = 2;
2c0262af
FB
6737 gen_stack_update(s, val + (2 << s->dflag));
6738 if (s->dflag == 0)
6739 gen_op_andl_T0_ffff();
6740 gen_op_jmp_T0();
6741 gen_eob(s);
6742 break;
6743 case 0xc3: /* ret */
6744 gen_pop_T0(s);
6745 gen_pop_update(s);
6746 if (s->dflag == 0)
6747 gen_op_andl_T0_ffff();
6748 gen_op_jmp_T0();
6749 gen_eob(s);
6750 break;
6751 case 0xca: /* lret im */
0af10c86 6752 val = cpu_ldsw_code(env, s->pc);
2c0262af
FB
6753 s->pc += 2;
6754 do_lret:
6755 if (s->pe && !s->vm86) {
773cdfcc 6756 gen_update_cc_op(s);
14ce26e7 6757 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 6758 gen_helper_lret_protected(cpu_env, tcg_const_i32(s->dflag),
a7812ae4 6759 tcg_const_i32(val));
2c0262af
FB
6760 } else {
6761 gen_stack_A0(s);
6762 /* pop offset */
57fec1fe 6763 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
2c0262af
FB
6764 if (s->dflag == 0)
6765 gen_op_andl_T0_ffff();
6766 /* NOTE: keeping EIP updated is not a problem in case of
6767 exception */
6768 gen_op_jmp_T0();
6769 /* pop selector */
6770 gen_op_addl_A0_im(2 << s->dflag);
57fec1fe 6771 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
3bd7da9e 6772 gen_op_movl_seg_T0_vm(R_CS);
2c0262af
FB
6773 /* add stack offset */
6774 gen_stack_update(s, val + (4 << s->dflag));
6775 }
6776 gen_eob(s);
6777 break;
6778 case 0xcb: /* lret */
6779 val = 0;
6780 goto do_lret;
6781 case 0xcf: /* iret */
872929aa 6782 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
2c0262af
FB
6783 if (!s->pe) {
6784 /* real mode */
2999a0b2 6785 gen_helper_iret_real(cpu_env, tcg_const_i32(s->dflag));
3ca51d07 6786 set_cc_op(s, CC_OP_EFLAGS);
f115e911
FB
6787 } else if (s->vm86) {
6788 if (s->iopl != 3) {
6789 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6790 } else {
2999a0b2 6791 gen_helper_iret_real(cpu_env, tcg_const_i32(s->dflag));
3ca51d07 6792 set_cc_op(s, CC_OP_EFLAGS);
f115e911 6793 }
2c0262af 6794 } else {
773cdfcc 6795 gen_update_cc_op(s);
14ce26e7 6796 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 6797 gen_helper_iret_protected(cpu_env, tcg_const_i32(s->dflag),
a7812ae4 6798 tcg_const_i32(s->pc - s->cs_base));
3ca51d07 6799 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6800 }
6801 gen_eob(s);
6802 break;
6803 case 0xe8: /* call im */
6804 {
14ce26e7 6805 if (dflag)
0af10c86 6806 tval = (int32_t)insn_get(env, s, OT_LONG);
14ce26e7 6807 else
0af10c86 6808 tval = (int16_t)insn_get(env, s, OT_WORD);
2c0262af 6809 next_eip = s->pc - s->cs_base;
14ce26e7 6810 tval += next_eip;
2c0262af 6811 if (s->dflag == 0)
14ce26e7 6812 tval &= 0xffff;
99596385
AJ
6813 else if(!CODE64(s))
6814 tval &= 0xffffffff;
14ce26e7 6815 gen_movtl_T0_im(next_eip);
2c0262af 6816 gen_push_T0(s);
14ce26e7 6817 gen_jmp(s, tval);
2c0262af
FB
6818 }
6819 break;
6820 case 0x9a: /* lcall im */
6821 {
6822 unsigned int selector, offset;
3b46e624 6823
14ce26e7
FB
6824 if (CODE64(s))
6825 goto illegal_op;
2c0262af 6826 ot = dflag ? OT_LONG : OT_WORD;
0af10c86
BS
6827 offset = insn_get(env, s, ot);
6828 selector = insn_get(env, s, OT_WORD);
3b46e624 6829
2c0262af 6830 gen_op_movl_T0_im(selector);
14ce26e7 6831 gen_op_movl_T1_imu(offset);
2c0262af
FB
6832 }
6833 goto do_lcall;
ecada8a2 6834 case 0xe9: /* jmp im */
14ce26e7 6835 if (dflag)
0af10c86 6836 tval = (int32_t)insn_get(env, s, OT_LONG);
14ce26e7 6837 else
0af10c86 6838 tval = (int16_t)insn_get(env, s, OT_WORD);
14ce26e7 6839 tval += s->pc - s->cs_base;
2c0262af 6840 if (s->dflag == 0)
14ce26e7 6841 tval &= 0xffff;
32938e12
AJ
6842 else if(!CODE64(s))
6843 tval &= 0xffffffff;
14ce26e7 6844 gen_jmp(s, tval);
2c0262af
FB
6845 break;
6846 case 0xea: /* ljmp im */
6847 {
6848 unsigned int selector, offset;
6849
14ce26e7
FB
6850 if (CODE64(s))
6851 goto illegal_op;
2c0262af 6852 ot = dflag ? OT_LONG : OT_WORD;
0af10c86
BS
6853 offset = insn_get(env, s, ot);
6854 selector = insn_get(env, s, OT_WORD);
3b46e624 6855
2c0262af 6856 gen_op_movl_T0_im(selector);
14ce26e7 6857 gen_op_movl_T1_imu(offset);
2c0262af
FB
6858 }
6859 goto do_ljmp;
6860 case 0xeb: /* jmp Jb */
0af10c86 6861 tval = (int8_t)insn_get(env, s, OT_BYTE);
14ce26e7 6862 tval += s->pc - s->cs_base;
2c0262af 6863 if (s->dflag == 0)
14ce26e7
FB
6864 tval &= 0xffff;
6865 gen_jmp(s, tval);
2c0262af
FB
6866 break;
6867 case 0x70 ... 0x7f: /* jcc Jb */
0af10c86 6868 tval = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
6869 goto do_jcc;
6870 case 0x180 ... 0x18f: /* jcc Jv */
6871 if (dflag) {
0af10c86 6872 tval = (int32_t)insn_get(env, s, OT_LONG);
2c0262af 6873 } else {
0af10c86 6874 tval = (int16_t)insn_get(env, s, OT_WORD);
2c0262af
FB
6875 }
6876 do_jcc:
6877 next_eip = s->pc - s->cs_base;
14ce26e7 6878 tval += next_eip;
2c0262af 6879 if (s->dflag == 0)
14ce26e7
FB
6880 tval &= 0xffff;
6881 gen_jcc(s, b, tval, next_eip);
2c0262af
FB
6882 break;
6883
6884 case 0x190 ... 0x19f: /* setcc Gv */
0af10c86 6885 modrm = cpu_ldub_code(env, s->pc++);
cc8b6f5b 6886 gen_setcc1(s, b, cpu_T[0]);
0af10c86 6887 gen_ldst_modrm(env, s, modrm, OT_BYTE, OR_TMP0, 1);
2c0262af
FB
6888 break;
6889 case 0x140 ... 0x14f: /* cmov Gv, Ev */
f32d3781
PB
6890 ot = dflag + OT_WORD;
6891 modrm = cpu_ldub_code(env, s->pc++);
6892 reg = ((modrm >> 3) & 7) | rex_r;
6893 gen_cmovcc1(env, s, ot, b, modrm, reg);
2c0262af 6894 break;
3b46e624 6895
2c0262af
FB
6896 /************************/
6897 /* flags */
6898 case 0x9c: /* pushf */
872929aa 6899 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
2c0262af
FB
6900 if (s->vm86 && s->iopl != 3) {
6901 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6902 } else {
773cdfcc 6903 gen_update_cc_op(s);
f0967a1a 6904 gen_helper_read_eflags(cpu_T[0], cpu_env);
2c0262af
FB
6905 gen_push_T0(s);
6906 }
6907 break;
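    /* popf: the eflags write mask depends on privilege.  At CPL 0 both
       IF and IOPL may change; with CPL <= IOPL only IF may change;
       otherwise neither, while TF/AC/ID/NT are always writable here.
       The 16-bit forms apply the same masks truncated to 0xffff. */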
6908 case 0x9d: /* popf */
872929aa 6909 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
2c0262af
FB
6910 if (s->vm86 && s->iopl != 3) {
6911 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6912 } else {
6913 gen_pop_T0(s);
6914 if (s->cpl == 0) {
6915 if (s->dflag) {
f0967a1a
BS
6916 gen_helper_write_eflags(cpu_env, cpu_T[0],
6917 tcg_const_i32((TF_MASK | AC_MASK |
6918 ID_MASK | NT_MASK |
6919 IF_MASK |
6920 IOPL_MASK)));
2c0262af 6921 } else {
f0967a1a
BS
6922 gen_helper_write_eflags(cpu_env, cpu_T[0],
6923 tcg_const_i32((TF_MASK | AC_MASK |
6924 ID_MASK | NT_MASK |
6925 IF_MASK | IOPL_MASK)
6926 & 0xffff));
2c0262af
FB
6927 }
6928 } else {
4136f33c
FB
6929 if (s->cpl <= s->iopl) {
6930 if (s->dflag) {
f0967a1a
BS
6931 gen_helper_write_eflags(cpu_env, cpu_T[0],
6932 tcg_const_i32((TF_MASK |
6933 AC_MASK |
6934 ID_MASK |
6935 NT_MASK |
6936 IF_MASK)));
4136f33c 6937 } else {
f0967a1a
BS
6938 gen_helper_write_eflags(cpu_env, cpu_T[0],
6939 tcg_const_i32((TF_MASK |
6940 AC_MASK |
6941 ID_MASK |
6942 NT_MASK |
6943 IF_MASK)
6944 & 0xffff));
4136f33c 6945 }
2c0262af 6946 } else {
4136f33c 6947 if (s->dflag) {
f0967a1a
BS
6948 gen_helper_write_eflags(cpu_env, cpu_T[0],
6949 tcg_const_i32((TF_MASK | AC_MASK |
6950 ID_MASK | NT_MASK)));
4136f33c 6951 } else {
f0967a1a
BS
6952 gen_helper_write_eflags(cpu_env, cpu_T[0],
6953 tcg_const_i32((TF_MASK | AC_MASK |
6954 ID_MASK | NT_MASK)
6955 & 0xffff));
4136f33c 6956 }
2c0262af
FB
6957 }
6958 }
6959 gen_pop_update(s);
3ca51d07 6960 set_cc_op(s, CC_OP_EFLAGS);
a9321a4d 6961 /* abort translation because TF/AC flag may change */
14ce26e7 6962 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
6963 gen_eob(s);
6964 }
6965 break;
6966 case 0x9e: /* sahf */
12e26b75 6967 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
14ce26e7 6968 goto illegal_op;
57fec1fe 6969 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
d229edce 6970 gen_compute_eflags(s);
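        /* SAHF: AH supplies SF, ZF, AF, PF and CF; OF (kept in the CC_O bit
           of cpu_cc_src) is preserved and the other bits of AH are ignored. */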
bd7a7b33
FB
6971 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6972 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6973 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
2c0262af
FB
6974 break;
6975 case 0x9f: /* lahf */
12e26b75 6976 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
14ce26e7 6977 goto illegal_op;
d229edce 6978 gen_compute_eflags(s);
bd7a7b33 6979 /* Note: gen_compute_eflags() only gives the condition codes */
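        /* Bit 1 of EFLAGS always reads as 1, hence the OR with 0x02 below;
           AH thus receives SF:ZF:0:AF:0:PF:1:CF. */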
d229edce 6980 tcg_gen_ori_tl(cpu_T[0], cpu_cc_src, 0x02);
57fec1fe 6981 gen_op_mov_reg_T0(OT_BYTE, R_AH);
2c0262af
FB
6982 break;
6983 case 0xf5: /* cmc */
d229edce 6984 gen_compute_eflags(s);
bd7a7b33 6985 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
2c0262af
FB
6986 break;
6987 case 0xf8: /* clc */
d229edce 6988 gen_compute_eflags(s);
bd7a7b33 6989 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
2c0262af
FB
6990 break;
6991 case 0xf9: /* stc */
d229edce 6992 gen_compute_eflags(s);
bd7a7b33 6993 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
2c0262af
FB
6994 break;
6995 case 0xfc: /* cld */
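        /* The direction flag is stored in env->df as +1 (cld) or -1 (std),
           so the string operations can add a suitably scaled df directly to
           ESI/EDI. */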
b6abf97d 6996 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
317ac620 6997 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
2c0262af
FB
6998 break;
6999 case 0xfd: /* std */
b6abf97d 7000 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
317ac620 7001 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
2c0262af
FB
7002 break;
7003
7004 /************************/
7005 /* bit operations */
7006 case 0x1ba: /* bt/bts/btr/btc Gv, im */
14ce26e7 7007 ot = dflag + OT_WORD;
0af10c86 7008 modrm = cpu_ldub_code(env, s->pc++);
33698e5f 7009 op = (modrm >> 3) & 7;
2c0262af 7010 mod = (modrm >> 6) & 3;
14ce26e7 7011 rm = (modrm & 7) | REX_B(s);
2c0262af 7012 if (mod != 3) {
14ce26e7 7013 s->rip_offset = 1;
0af10c86 7014 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 7015 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 7016 } else {
57fec1fe 7017 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
7018 }
7019 /* load shift */
0af10c86 7020 val = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7021 gen_op_movl_T1_im(val);
7022 if (op < 4)
7023 goto illegal_op;
7024 op -= 4;
f484d386 7025 goto bt_op;
2c0262af
FB
7026 case 0x1a3: /* bt Gv, Ev */
7027 op = 0;
7028 goto do_btx;
7029 case 0x1ab: /* bts */
7030 op = 1;
7031 goto do_btx;
7032 case 0x1b3: /* btr */
7033 op = 2;
7034 goto do_btx;
7035 case 0x1bb: /* btc */
7036 op = 3;
7037 do_btx:
14ce26e7 7038 ot = dflag + OT_WORD;
0af10c86 7039 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 7040 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 7041 mod = (modrm >> 6) & 3;
14ce26e7 7042 rm = (modrm & 7) | REX_B(s);
57fec1fe 7043 gen_op_mov_TN_reg(OT_LONG, 1, reg);
2c0262af 7044 if (mod != 3) {
0af10c86 7045 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 7046 /* specific case: we need to add a displacement */
f484d386
FB
7047 gen_exts(ot, cpu_T[1]);
7048 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7049 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7050 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
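                /* The signed bit offset selects the addressed word:
                   A0 += (bit_offset >> (3 + ot)) << ot, i.e. the byte
                   address of the 2^ot-byte word containing the bit. */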
57fec1fe 7051 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 7052 } else {
57fec1fe 7053 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af 7054 }
f484d386
FB
7055 bt_op:
7056 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
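        /* T1 now holds the bit offset modulo the operand width.  CF receives
           the selected bit (via cpu_cc_src, or cpu_tmp4 for the modifying
           forms); bts sets the bit, btr clears it and btc complements it. */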
7057 switch(op) {
7058 case 0:
7059 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7060 tcg_gen_movi_tl(cpu_cc_dst, 0);
7061 break;
7062 case 1:
7063 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7064 tcg_gen_movi_tl(cpu_tmp0, 1);
7065 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7066 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7067 break;
7068 case 2:
7069 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7070 tcg_gen_movi_tl(cpu_tmp0, 1);
7071 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7072 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7073 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7074 break;
7075 default:
7076 case 3:
7077 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7078 tcg_gen_movi_tl(cpu_tmp0, 1);
7079 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7080 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7081 break;
7082 }
3ca51d07 7083 set_cc_op(s, CC_OP_SARB + ot);
2c0262af
FB
7084 if (op != 0) {
7085 if (mod != 3)
57fec1fe 7086 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 7087 else
57fec1fe 7088 gen_op_mov_reg_T0(ot, rm);
f484d386
FB
7089 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7090 tcg_gen_movi_tl(cpu_cc_dst, 0);
2c0262af
FB
7091 }
7092 break;
321c5351
RH
7093 case 0x1bc: /* bsf / tzcnt */
7094 case 0x1bd: /* bsr / lzcnt */
7095 ot = dflag + OT_WORD;
7096 modrm = cpu_ldub_code(env, s->pc++);
7097 reg = ((modrm >> 3) & 7) | rex_r;
7098 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
7099 gen_extu(ot, cpu_T[0]);
7100
7101 /* Note that lzcnt and tzcnt are in different extensions. */
7102 if ((prefixes & PREFIX_REPZ)
7103 && (b & 1
7104 ? s->cpuid_ext3_features & CPUID_EXT3_ABM
7105 : s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)) {
7106 int size = 8 << ot;
7107 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
7108 if (b & 1) {
7109 /* For lzcnt, reduce the target_ulong result by the
7110 number of zeros that we expect to find at the top. */
7111 gen_helper_clz(cpu_T[0], cpu_T[0]);
7112 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], TARGET_LONG_BITS - size);
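                /* e.g. a 32-bit lzcnt on a 64-bit target_ulong: the
                   zero-extended input has at least 32 leading zeros, so
                   subtracting TARGET_LONG_BITS - 32 yields the 32-bit
                   count (and a zero input gives the operand size, 32). */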
6191b059 7113 } else {
321c5351
RH
7114 /* For tzcnt, a zero input must return the operand size:
7115 force all bits outside the operand size to 1. */
7116 target_ulong mask = (target_ulong)-2 << (size - 1);
7117 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], mask);
7118 gen_helper_ctz(cpu_T[0], cpu_T[0]);
6191b059 7119 }
321c5351
RH
7120 /* For lzcnt/tzcnt, C and Z bits are defined and are
7121 related to the result. */
7122 gen_op_update1_cc();
7123 set_cc_op(s, CC_OP_BMILGB + ot);
7124 } else {
7125 /* For bsr/bsf, only the Z bit is defined and it is related
7126 to the input and not the result. */
7127 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
7128 set_cc_op(s, CC_OP_LOGICB + ot);
7129 if (b & 1) {
7130 /* For bsr, return the bit index of the first 1 bit,
7131 not the count of leading zeros. */
7132 gen_helper_clz(cpu_T[0], cpu_T[0]);
7133 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], TARGET_LONG_BITS - 1);
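                /* For a non-zero input clz is in [0, TARGET_LONG_BITS - 1],
                   so XOR with TARGET_LONG_BITS - 1 equals
                   TARGET_LONG_BITS - 1 - clz, the index of the highest set
                   bit; the zero-input case is overridden by the movcond
                   below. */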
7134 } else {
7135 gen_helper_ctz(cpu_T[0], cpu_T[0]);
7136 }
7137 /* ??? The manual says that the output is undefined when the
7138 input is zero, but real hardware leaves it unchanged, and
7139 real programs appear to depend on that. */
7140 tcg_gen_movi_tl(cpu_tmp0, 0);
7141 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_T[0], cpu_cc_dst, cpu_tmp0,
7142 cpu_regs[reg], cpu_T[0]);
6191b059 7143 }
321c5351 7144 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
7145 break;
7146 /************************/
7147 /* bcd */
7148 case 0x27: /* daa */
14ce26e7
FB
7149 if (CODE64(s))
7150 goto illegal_op;
773cdfcc 7151 gen_update_cc_op(s);
7923057b 7152 gen_helper_daa(cpu_env);
3ca51d07 7153 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7154 break;
7155 case 0x2f: /* das */
14ce26e7
FB
7156 if (CODE64(s))
7157 goto illegal_op;
773cdfcc 7158 gen_update_cc_op(s);
7923057b 7159 gen_helper_das(cpu_env);
3ca51d07 7160 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7161 break;
7162 case 0x37: /* aaa */
14ce26e7
FB
7163 if (CODE64(s))
7164 goto illegal_op;
773cdfcc 7165 gen_update_cc_op(s);
7923057b 7166 gen_helper_aaa(cpu_env);
3ca51d07 7167 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7168 break;
7169 case 0x3f: /* aas */
14ce26e7
FB
7170 if (CODE64(s))
7171 goto illegal_op;
773cdfcc 7172 gen_update_cc_op(s);
7923057b 7173 gen_helper_aas(cpu_env);
3ca51d07 7174 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7175 break;
7176 case 0xd4: /* aam */
14ce26e7
FB
7177 if (CODE64(s))
7178 goto illegal_op;
0af10c86 7179 val = cpu_ldub_code(env, s->pc++);
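        /* AAM divides AL by the immediate base (quotient to AH, remainder
           to AL); a zero immediate raises #DE, handled just below. */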
b6d7c3db
TS
7180 if (val == 0) {
7181 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7182 } else {
7923057b 7183 gen_helper_aam(cpu_env, tcg_const_i32(val));
3ca51d07 7184 set_cc_op(s, CC_OP_LOGICB);
b6d7c3db 7185 }
2c0262af
FB
7186 break;
7187 case 0xd5: /* aad */
14ce26e7
FB
7188 if (CODE64(s))
7189 goto illegal_op;
0af10c86 7190 val = cpu_ldub_code(env, s->pc++);
7923057b 7191 gen_helper_aad(cpu_env, tcg_const_i32(val));
3ca51d07 7192 set_cc_op(s, CC_OP_LOGICB);
2c0262af
FB
7193 break;
7194 /************************/
7195 /* misc */
7196 case 0x90: /* nop */
ab1f142b 7197 /* XXX: correct lock test for all insn */
7418027e 7198 if (prefixes & PREFIX_LOCK) {
ab1f142b 7199 goto illegal_op;
7418027e
RH
7200 }
7201 /* If REX_B is set, then this is xchg eax, r8d, not a nop. */
7202 if (REX_B(s)) {
7203 goto do_xchg_reg_eax;
7204 }
0573fbfc
TS
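        /* F3 90 is PAUSE; only the SVM PAUSE intercept is of interest here,
           otherwise it behaves as a plain nop. */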
7205 if (prefixes & PREFIX_REPZ) {
7206 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7207 }
2c0262af
FB
7208 break;
7209 case 0x9b: /* fwait */
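        /* WAIT/FWAIT raises #NM when CR0.MP and CR0.TS are both set;
           otherwise it lets the fwait helper report any pending FPU
           exception. */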
5fafdf24 7210 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7eee2a50
FB
7211 (HF_MP_MASK | HF_TS_MASK)) {
7212 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2ee73ac3 7213 } else {
773cdfcc 7214 gen_update_cc_op(s);
14ce26e7 7215 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 7216 gen_helper_fwait(cpu_env);
7eee2a50 7217 }
2c0262af
FB
7218 break;
7219 case 0xcc: /* int3 */
7220 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7221 break;
7222 case 0xcd: /* int N */
0af10c86 7223 val = cpu_ldub_code(env, s->pc++);
f115e911 7224 if (s->vm86 && s->iopl != 3) {
5fafdf24 7225 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
f115e911
FB
7226 } else {
7227 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7228 }
2c0262af
FB
7229 break;
7230 case 0xce: /* into */
14ce26e7
FB
7231 if (CODE64(s))
7232 goto illegal_op;
773cdfcc 7233 gen_update_cc_op(s);
a8ede8ba 7234 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7235 gen_helper_into(cpu_env, tcg_const_i32(s->pc - pc_start));
2c0262af 7236 break;
0b97134b 7237#ifdef WANT_ICEBP
2c0262af 7238 case 0xf1: /* icebp (undocumented, exits to external debugger) */
872929aa 7239 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
aba9d61e 7240#if 1
2c0262af 7241 gen_debug(s, pc_start - s->cs_base);
aba9d61e
FB
7242#else
7243 /* start debug */
0af10c86 7244 tb_flush(env);
24537a01 7245 qemu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
aba9d61e 7246#endif
2c0262af 7247 break;
0b97134b 7248#endif
2c0262af
FB
7249 case 0xfa: /* cli */
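        /* CLI and STI are privileged: outside vm86 they require
           CPL <= IOPL, inside vm86 they require IOPL == 3; otherwise
           #GP(0) is raised. */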
7250 if (!s->vm86) {
7251 if (s->cpl <= s->iopl) {
f0967a1a 7252 gen_helper_cli(cpu_env);
2c0262af
FB
7253 } else {
7254 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7255 }
7256 } else {
7257 if (s->iopl == 3) {
f0967a1a 7258 gen_helper_cli(cpu_env);
2c0262af
FB
7259 } else {
7260 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7261 }
7262 }
7263 break;
7264 case 0xfb: /* sti */
7265 if (!s->vm86) {
7266 if (s->cpl <= s->iopl) {
7267 gen_sti:
f0967a1a 7268 gen_helper_sti(cpu_env);
2c0262af 7269 /* interrupts are enabled only after the first insn following sti */
a2cc3b24
FB
7270 /* If several consecutive insns inhibit interrupts, only the
7271 _first_ one sets the inhibit flag */
7272 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 7273 gen_helper_set_inhibit_irq(cpu_env);
2c0262af 7274 /* give a chance to handle pending irqs */
14ce26e7 7275 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
7276 gen_eob(s);
7277 } else {
7278 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7279 }
7280 } else {
7281 if (s->iopl == 3) {
7282 goto gen_sti;
7283 } else {
7284 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7285 }
7286 }
7287 break;
7288 case 0x62: /* bound */
14ce26e7
FB
7289 if (CODE64(s))
7290 goto illegal_op;
2c0262af 7291 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 7292 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7293 reg = (modrm >> 3) & 7;
7294 mod = (modrm >> 6) & 3;
7295 if (mod == 3)
7296 goto illegal_op;
57fec1fe 7297 gen_op_mov_TN_reg(ot, 0, reg);
0af10c86 7298 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 7299 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7300 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
92fc4b58
BS
7301 if (ot == OT_WORD) {
7302 gen_helper_boundw(cpu_env, cpu_A0, cpu_tmp2_i32);
7303 } else {
7304 gen_helper_boundl(cpu_env, cpu_A0, cpu_tmp2_i32);
7305 }
2c0262af
FB
7306 break;
7307 case 0x1c8 ... 0x1cf: /* bswap reg */
14ce26e7
FB
7308 reg = (b & 7) | REX_B(s);
7309#ifdef TARGET_X86_64
7310 if (dflag == 2) {
57fec1fe 7311 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
66896cb8 7312 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
57fec1fe 7313 gen_op_mov_reg_T0(OT_QUAD, reg);
5fafdf24 7314 } else
8777643e 7315#endif
57fec1fe
FB
7316 {
7317 gen_op_mov_TN_reg(OT_LONG, 0, reg);
8777643e
AJ
7318 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
7319 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
57fec1fe 7320 gen_op_mov_reg_T0(OT_LONG, reg);
14ce26e7 7321 }
2c0262af
FB
7322 break;
7323 case 0xd6: /* salc */
14ce26e7
FB
7324 if (CODE64(s))
7325 goto illegal_op;
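        /* Undocumented SALC: AL = CF ? 0xff : 0x00.  Negating the 0/1
           carry value computed below produces exactly that in the low
           byte. */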
cc8b6f5b 7326 gen_compute_eflags_c(s, cpu_T[0]);
bd7a7b33
FB
7327 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7328 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
2c0262af
FB
7329 break;
7330 case 0xe0: /* loopnz */
7331 case 0xe1: /* loopz */
2c0262af
FB
7332 case 0xe2: /* loop */
7333 case 0xe3: /* jecxz */
14ce26e7 7334 {
6e0d8677 7335 int l1, l2, l3;
14ce26e7 7336
0af10c86 7337 tval = (int8_t)insn_get(env, s, OT_BYTE);
14ce26e7
FB
7338 next_eip = s->pc - s->cs_base;
7339 tval += next_eip;
7340 if (s->dflag == 0)
7341 tval &= 0xffff;
3b46e624 7342
14ce26e7
FB
7343 l1 = gen_new_label();
7344 l2 = gen_new_label();
6e0d8677 7345 l3 = gen_new_label();
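            /* l1: branch taken (EIP = tval), l3: fall through
               (EIP = next_eip), l2: common end of the block. */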
14ce26e7 7346 b &= 3;
6e0d8677
FB
7347 switch(b) {
7348 case 0: /* loopnz */
7349 case 1: /* loopz */
6e0d8677
FB
7350 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7351 gen_op_jz_ecx(s->aflag, l3);
5bdb91b0 7352 gen_jcc1(s, (JCC_Z << 1) | (b ^ 1), l1);
6e0d8677
FB
7353 break;
7354 case 2: /* loop */
7355 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7356 gen_op_jnz_ecx(s->aflag, l1);
7357 break;
7358 default:
7359 case 3: /* jcxz */
7360 gen_op_jz_ecx(s->aflag, l1);
7361 break;
14ce26e7
FB
7362 }
7363
6e0d8677 7364 gen_set_label(l3);
14ce26e7 7365 gen_jmp_im(next_eip);
8e1c85e3 7366 tcg_gen_br(l2);
6e0d8677 7367
14ce26e7
FB
7368 gen_set_label(l1);
7369 gen_jmp_im(tval);
7370 gen_set_label(l2);
7371 gen_eob(s);
7372 }
2c0262af
FB
7373 break;
7374 case 0x130: /* wrmsr */
7375 case 0x132: /* rdmsr */
7376 if (s->cpl != 0) {
7377 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7378 } else {
773cdfcc 7379 gen_update_cc_op(s);
872929aa 7380 gen_jmp_im(pc_start - s->cs_base);
0573fbfc 7381 if (b & 2) {
4a7443be 7382 gen_helper_rdmsr(cpu_env);
0573fbfc 7383 } else {
4a7443be 7384 gen_helper_wrmsr(cpu_env);
0573fbfc 7385 }
2c0262af
FB
7386 }
7387 break;
7388 case 0x131: /* rdtsc */
773cdfcc 7389 gen_update_cc_op(s);
ecada8a2 7390 gen_jmp_im(pc_start - s->cs_base);
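        /* With icount enabled, the TSC read is bracketed by
           gen_io_start()/gen_io_end() and the TB is ended, so the
           instruction count seen by the helper stays exact. */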
efade670
PB
7391 if (use_icount)
7392 gen_io_start();
4a7443be 7393 gen_helper_rdtsc(cpu_env);
efade670
PB
7394 if (use_icount) {
7395 gen_io_end();
7396 gen_jmp(s, s->pc - s->cs_base);
7397 }
2c0262af 7398 break;
df01e0fc 7399 case 0x133: /* rdpmc */
773cdfcc 7400 gen_update_cc_op(s);
df01e0fc 7401 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7402 gen_helper_rdpmc(cpu_env);
df01e0fc 7403 break;
023fe10d 7404 case 0x134: /* sysenter */
2436b61a 7405 /* On Intel CPUs, SYSENTER is also valid in 64-bit mode */
0af10c86 7406 if (CODE64(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
14ce26e7 7407 goto illegal_op;
023fe10d
FB
7408 if (!s->pe) {
7409 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7410 } else {
728d803b 7411 gen_update_cc_op(s);
14ce26e7 7412 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7413 gen_helper_sysenter(cpu_env);
023fe10d
FB
7414 gen_eob(s);
7415 }
7416 break;
7417 case 0x135: /* sysexit */
2436b61a 7418 /* On Intel CPUs, SYSEXIT is also valid in 64-bit mode */
0af10c86 7419 if (CODE64(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
14ce26e7 7420 goto illegal_op;
023fe10d
FB
7421 if (!s->pe) {
7422 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7423 } else {
728d803b 7424 gen_update_cc_op(s);
14ce26e7 7425 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7426 gen_helper_sysexit(cpu_env, tcg_const_i32(dflag));
023fe10d
FB
7427 gen_eob(s);
7428 }
7429 break;
14ce26e7
FB
7430#ifdef TARGET_X86_64
7431 case 0x105: /* syscall */
7432 /* XXX: is it usable in real mode ? */
728d803b 7433 gen_update_cc_op(s);
14ce26e7 7434 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7435 gen_helper_syscall(cpu_env, tcg_const_i32(s->pc - pc_start));
14ce26e7
FB
7436 gen_eob(s);
7437 break;
7438 case 0x107: /* sysret */
7439 if (!s->pe) {
7440 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7441 } else {
728d803b 7442 gen_update_cc_op(s);
14ce26e7 7443 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7444 gen_helper_sysret(cpu_env, tcg_const_i32(s->dflag));
aba9d61e 7445 /* condition codes are modified only in long mode */
3ca51d07
RH
7446 if (s->lma) {
7447 set_cc_op(s, CC_OP_EFLAGS);
7448 }
14ce26e7
FB
7449 gen_eob(s);
7450 }
7451 break;
7452#endif
2c0262af 7453 case 0x1a2: /* cpuid */
773cdfcc 7454 gen_update_cc_op(s);
9575cb94 7455 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7456 gen_helper_cpuid(cpu_env);
2c0262af
FB
7457 break;
7458 case 0xf4: /* hlt */
7459 if (s->cpl != 0) {
7460 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7461 } else {
773cdfcc 7462 gen_update_cc_op(s);
94451178 7463 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7464 gen_helper_hlt(cpu_env, tcg_const_i32(s->pc - pc_start));
5779406a 7465 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
7466 }
7467 break;
7468 case 0x100:
0af10c86 7469 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7470 mod = (modrm >> 6) & 3;
7471 op = (modrm >> 3) & 7;
7472 switch(op) {
7473 case 0: /* sldt */
f115e911
FB
7474 if (!s->pe || s->vm86)
7475 goto illegal_op;
872929aa 7476 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
651ba608 7477 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
2c0262af
FB
7478 ot = OT_WORD;
7479 if (mod == 3)
7480 ot += s->dflag;
0af10c86 7481 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
7482 break;
7483 case 2: /* lldt */
f115e911
FB
7484 if (!s->pe || s->vm86)
7485 goto illegal_op;
2c0262af
FB
7486 if (s->cpl != 0) {
7487 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7488 } else {
872929aa 7489 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
0af10c86 7490 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
14ce26e7 7491 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7492 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 7493 gen_helper_lldt(cpu_env, cpu_tmp2_i32);
2c0262af
FB
7494 }
7495 break;
7496 case 1: /* str */
f115e911
FB
7497 if (!s->pe || s->vm86)
7498 goto illegal_op;
872929aa 7499 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
651ba608 7500 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
2c0262af
FB
7501 ot = OT_WORD;
7502 if (mod == 3)
7503 ot += s->dflag;
0af10c86 7504 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
7505 break;
7506 case 3: /* ltr */
f115e911
FB
7507 if (!s->pe || s->vm86)
7508 goto illegal_op;
2c0262af
FB
7509 if (s->cpl != 0) {
7510 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7511 } else {
872929aa 7512 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
0af10c86 7513 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
14ce26e7 7514 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7515 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 7516 gen_helper_ltr(cpu_env, cpu_tmp2_i32);
2c0262af
FB
7517 }
7518 break;
7519 case 4: /* verr */
7520 case 5: /* verw */
f115e911
FB
7521 if (!s->pe || s->vm86)
7522 goto illegal_op;
0af10c86 7523 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
773cdfcc 7524 gen_update_cc_op(s);
2999a0b2
BS
7525 if (op == 4) {
7526 gen_helper_verr(cpu_env, cpu_T[0]);
7527 } else {
7528 gen_helper_verw(cpu_env, cpu_T[0]);
7529 }
3ca51d07 7530 set_cc_op(s, CC_OP_EFLAGS);
f115e911 7531 break;
2c0262af
FB
7532 default:
7533 goto illegal_op;
7534 }
7535 break;
7536 case 0x101:
0af10c86 7537 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7538 mod = (modrm >> 6) & 3;
7539 op = (modrm >> 3) & 7;
3d7374c5 7540 rm = modrm & 7;
2c0262af
FB
7541 switch(op) {
7542 case 0: /* sgdt */
2c0262af
FB
7543 if (mod == 3)
7544 goto illegal_op;
872929aa 7545 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
0af10c86 7546 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 7547 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
57fec1fe 7548 gen_op_st_T0_A0(OT_WORD + s->mem_index);
aba9d61e 7549 gen_add_A0_im(s, 2);
651ba608 7550 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
2c0262af
FB
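            /* The limit is always stored as a 16-bit word (above); with a
               16-bit operand size only the low 24 bits of the base are
               stored. */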
7551 if (!s->dflag)
7552 gen_op_andl_T0_im(0xffffff);
57fec1fe 7553 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
2c0262af 7554 break;
3d7374c5
FB
7555 case 1:
7556 if (mod == 3) {
7557 switch (rm) {
7558 case 0: /* monitor */
7559 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7560 s->cpl != 0)
7561 goto illegal_op;
773cdfcc 7562 gen_update_cc_op(s);
3d7374c5
FB
7563 gen_jmp_im(pc_start - s->cs_base);
7564#ifdef TARGET_X86_64
7565 if (s->aflag == 2) {
bbf662ee 7566 gen_op_movq_A0_reg(R_EAX);
5fafdf24 7567 } else
3d7374c5
FB
7568#endif
7569 {
bbf662ee 7570 gen_op_movl_A0_reg(R_EAX);
3d7374c5
FB
7571 if (s->aflag == 0)
7572 gen_op_andl_A0_ffff();
7573 }
7574 gen_add_A0_ds_seg(s);
4a7443be 7575 gen_helper_monitor(cpu_env, cpu_A0);
3d7374c5
FB
7576 break;
7577 case 1: /* mwait */
7578 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7579 s->cpl != 0)
7580 goto illegal_op;
728d803b 7581 gen_update_cc_op(s);
94451178 7582 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7583 gen_helper_mwait(cpu_env, tcg_const_i32(s->pc - pc_start));
3d7374c5
FB
7584 gen_eob(s);
7585 break;
a9321a4d
PA
7586 case 2: /* clac */
7587 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) ||
7588 s->cpl != 0) {
7589 goto illegal_op;
7590 }
7591 gen_helper_clac(cpu_env);
7592 gen_jmp_im(s->pc - s->cs_base);
7593 gen_eob(s);
7594 break;
7595 case 3: /* stac */
7596 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) ||
7597 s->cpl != 0) {
7598 goto illegal_op;
7599 }
7600 gen_helper_stac(cpu_env);
7601 gen_jmp_im(s->pc - s->cs_base);
7602 gen_eob(s);
7603 break;
3d7374c5
FB
7604 default:
7605 goto illegal_op;
7606 }
7607 } else { /* sidt */
872929aa 7608 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
0af10c86 7609 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 7610 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
57fec1fe 7611 gen_op_st_T0_A0(OT_WORD + s->mem_index);
3d7374c5 7612 gen_add_A0_im(s, 2);
651ba608 7613 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
3d7374c5
FB
7614 if (!s->dflag)
7615 gen_op_andl_T0_im(0xffffff);
57fec1fe 7616 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
3d7374c5
FB
7617 }
7618 break;
2c0262af
FB
7619 case 2: /* lgdt */
7620 case 3: /* lidt */
0573fbfc 7621 if (mod == 3) {
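            /* With a register operand (mod == 3) this block dispatches the
               SVM instructions (VMRUN ... INVLPGA) on the r/m field instead
               of loading a descriptor table. */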
773cdfcc 7622 gen_update_cc_op(s);
872929aa 7623 gen_jmp_im(pc_start - s->cs_base);
0573fbfc
TS
7624 switch(rm) {
7625 case 0: /* VMRUN */
872929aa
FB
7626 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7627 goto illegal_op;
7628 if (s->cpl != 0) {
7629 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
0573fbfc 7630 break;
872929aa 7631 } else {
052e80d5 7632 gen_helper_vmrun(cpu_env, tcg_const_i32(s->aflag),
a7812ae4 7633 tcg_const_i32(s->pc - pc_start));
db620f46 7634 tcg_gen_exit_tb(0);
5779406a 7635 s->is_jmp = DISAS_TB_JUMP;
872929aa 7636 }
0573fbfc
TS
7637 break;
7638 case 1: /* VMMCALL */
872929aa
FB
7639 if (!(s->flags & HF_SVME_MASK))
7640 goto illegal_op;
052e80d5 7641 gen_helper_vmmcall(cpu_env);
0573fbfc
TS
7642 break;
7643 case 2: /* VMLOAD */
872929aa
FB
7644 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7645 goto illegal_op;
7646 if (s->cpl != 0) {
7647 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7648 break;
7649 } else {
052e80d5 7650 gen_helper_vmload(cpu_env, tcg_const_i32(s->aflag));
872929aa 7651 }
0573fbfc
TS
7652 break;
7653 case 3: /* VMSAVE */
872929aa
FB
7654 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7655 goto illegal_op;
7656 if (s->cpl != 0) {
7657 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7658 break;
7659 } else {
052e80d5 7660 gen_helper_vmsave(cpu_env, tcg_const_i32(s->aflag));
872929aa 7661 }
0573fbfc
TS
7662 break;
7663 case 4: /* STGI */
872929aa
FB
7664 if ((!(s->flags & HF_SVME_MASK) &&
7665 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7666 !s->pe)
7667 goto illegal_op;
7668 if (s->cpl != 0) {
7669 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7670 break;
7671 } else {
052e80d5 7672 gen_helper_stgi(cpu_env);
872929aa 7673 }
0573fbfc
TS
7674 break;
7675 case 5: /* CLGI */
872929aa
FB
7676 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7677 goto illegal_op;
7678 if (s->cpl != 0) {
7679 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7680 break;
7681 } else {
052e80d5 7682 gen_helper_clgi(cpu_env);
872929aa 7683 }
0573fbfc
TS
7684 break;
7685 case 6: /* SKINIT */
872929aa
FB
7686 if ((!(s->flags & HF_SVME_MASK) &&
7687 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7688 !s->pe)
7689 goto illegal_op;
052e80d5 7690 gen_helper_skinit(cpu_env);
0573fbfc
TS
7691 break;
7692 case 7: /* INVLPGA */
872929aa
FB
7693 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7694 goto illegal_op;
7695 if (s->cpl != 0) {
7696 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7697 break;
7698 } else {
052e80d5 7699 gen_helper_invlpga(cpu_env, tcg_const_i32(s->aflag));
872929aa 7700 }
0573fbfc
TS
7701 break;
7702 default:
7703 goto illegal_op;
7704 }
7705 } else if (s->cpl != 0) {
2c0262af
FB
7706 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7707 } else {
872929aa
FB
7708 gen_svm_check_intercept(s, pc_start,
7709 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
0af10c86 7710 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 7711 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
aba9d61e 7712 gen_add_A0_im(s, 2);
57fec1fe 7713 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
2c0262af
FB
7714 if (!s->dflag)
7715 gen_op_andl_T0_im(0xffffff);
7716 if (op == 2) {
651ba608
FB
7717 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7718 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
2c0262af 7719 } else {
651ba608
FB
7720 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7721 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
2c0262af
FB
7722 }
7723 }
7724 break;
7725 case 4: /* smsw */
872929aa 7726 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
e2542fe2 7727#if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
f60d2728 7728 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7729#else
651ba608 7730 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
f60d2728 7731#endif
0af10c86 7732 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 1);
2c0262af
FB
7733 break;
7734 case 6: /* lmsw */
7735 if (s->cpl != 0) {
7736 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7737 } else {
872929aa 7738 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
0af10c86 7739 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
4a7443be 7740 gen_helper_lmsw(cpu_env, cpu_T[0]);
14ce26e7 7741 gen_jmp_im(s->pc - s->cs_base);
d71b9a8b 7742 gen_eob(s);
2c0262af
FB
7743 }
7744 break;
1b050077
AP
7745 case 7:
7746 if (mod != 3) { /* invlpg */
7747 if (s->cpl != 0) {
7748 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7749 } else {
773cdfcc 7750 gen_update_cc_op(s);
1b050077 7751 gen_jmp_im(pc_start - s->cs_base);
0af10c86 7752 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4a7443be 7753 gen_helper_invlpg(cpu_env, cpu_A0);
1b050077
AP
7754 gen_jmp_im(s->pc - s->cs_base);
7755 gen_eob(s);
7756 }
2c0262af 7757 } else {
1b050077
AP
7758 switch (rm) {
7759 case 0: /* swapgs */
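                /* SWAPGS (64-bit only): exchange GS.base with the
                   kernelgsbase MSR image held in CPUX86State. */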
14ce26e7 7760#ifdef TARGET_X86_64
1b050077
AP
7761 if (CODE64(s)) {
7762 if (s->cpl != 0) {
7763 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7764 } else {
7765 tcg_gen_ld_tl(cpu_T[0], cpu_env,
7766 offsetof(CPUX86State,segs[R_GS].base));
7767 tcg_gen_ld_tl(cpu_T[1], cpu_env,
7768 offsetof(CPUX86State,kernelgsbase));
7769 tcg_gen_st_tl(cpu_T[1], cpu_env,
7770 offsetof(CPUX86State,segs[R_GS].base));
7771 tcg_gen_st_tl(cpu_T[0], cpu_env,
7772 offsetof(CPUX86State,kernelgsbase));
7773 }
5fafdf24 7774 } else
14ce26e7
FB
7775#endif
7776 {
7777 goto illegal_op;
7778 }
1b050077
AP
7779 break;
7780 case 1: /* rdtscp */
7781 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7782 goto illegal_op;
773cdfcc 7783 gen_update_cc_op(s);
9575cb94 7784 gen_jmp_im(pc_start - s->cs_base);
1b050077
AP
7785 if (use_icount)
7786 gen_io_start();
4a7443be 7787 gen_helper_rdtscp(cpu_env);
1b050077
AP
7788 if (use_icount) {
7789 gen_io_end();
7790 gen_jmp(s, s->pc - s->cs_base);
7791 }
7792 break;
7793 default:
7794 goto illegal_op;
14ce26e7 7795 }
2c0262af
FB
7796 }
7797 break;
7798 default:
7799 goto illegal_op;
7800 }
7801 break;
3415a4dd
FB
7802 case 0x108: /* invd */
7803 case 0x109: /* wbinvd */
7804 if (s->cpl != 0) {
7805 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7806 } else {
872929aa 7807 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
3415a4dd
FB
7808 /* nothing to do */
7809 }
7810 break;
14ce26e7
FB
7811 case 0x63: /* arpl or movslS (x86_64) */
7812#ifdef TARGET_X86_64
7813 if (CODE64(s)) {
7814 int d_ot;
7815 /* d_ot is the size of destination */
7816 d_ot = dflag + OT_WORD;
7817
0af10c86 7818 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
7819 reg = ((modrm >> 3) & 7) | rex_r;
7820 mod = (modrm >> 6) & 3;
7821 rm = (modrm & 7) | REX_B(s);
3b46e624 7822
14ce26e7 7823 if (mod == 3) {
57fec1fe 7824 gen_op_mov_TN_reg(OT_LONG, 0, rm);
14ce26e7
FB
7825 /* sign extend */
7826 if (d_ot == OT_QUAD)
e108dd01 7827 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
57fec1fe 7828 gen_op_mov_reg_T0(d_ot, reg);
14ce26e7 7829 } else {
0af10c86 7830 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 7831 if (d_ot == OT_QUAD) {
57fec1fe 7832 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
14ce26e7 7833 } else {
57fec1fe 7834 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
14ce26e7 7835 }
57fec1fe 7836 gen_op_mov_reg_T0(d_ot, reg);
14ce26e7 7837 }
5fafdf24 7838 } else
14ce26e7
FB
7839#endif
7840 {
3bd7da9e 7841 int label1;
49d9fdcc 7842 TCGv t0, t1, t2, a0;
1e4840bf 7843
14ce26e7
FB
7844 if (!s->pe || s->vm86)
7845 goto illegal_op;
a7812ae4
PB
7846 t0 = tcg_temp_local_new();
7847 t1 = tcg_temp_local_new();
7848 t2 = tcg_temp_local_new();
3bd7da9e 7849 ot = OT_WORD;
0af10c86 7850 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
7851 reg = (modrm >> 3) & 7;
7852 mod = (modrm >> 6) & 3;
7853 rm = modrm & 7;
7854 if (mod != 3) {
0af10c86 7855 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
1e4840bf 7856 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
49d9fdcc
LD
7857 a0 = tcg_temp_local_new();
7858 tcg_gen_mov_tl(a0, cpu_A0);
14ce26e7 7859 } else {
1e4840bf 7860 gen_op_mov_v_reg(ot, t0, rm);
49d9fdcc 7861 TCGV_UNUSED(a0);
14ce26e7 7862 }
1e4840bf
FB
7863 gen_op_mov_v_reg(ot, t1, reg);
7864 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7865 tcg_gen_andi_tl(t1, t1, 3);
7866 tcg_gen_movi_tl(t2, 0);
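            /* ARPL: if the destination selector's RPL (low 2 bits) is below
               the source's, raise it and set ZF, otherwise clear ZF.  t2
               carries the new Z bit and is merged into cpu_cc_src below. */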
3bd7da9e 7867 label1 = gen_new_label();
1e4840bf
FB
7868 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7869 tcg_gen_andi_tl(t0, t0, ~3);
7870 tcg_gen_or_tl(t0, t0, t1);
7871 tcg_gen_movi_tl(t2, CC_Z);
3bd7da9e 7872 gen_set_label(label1);
14ce26e7 7873 if (mod != 3) {
49d9fdcc
LD
7874 gen_op_st_v(ot + s->mem_index, t0, a0);
7875 tcg_temp_free(a0);
7876 } else {
1e4840bf 7877 gen_op_mov_reg_v(ot, rm, t0);
14ce26e7 7878 }
d229edce 7879 gen_compute_eflags(s);
3bd7da9e 7880 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
1e4840bf 7881 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
1e4840bf
FB
7882 tcg_temp_free(t0);
7883 tcg_temp_free(t1);
7884 tcg_temp_free(t2);
f115e911 7885 }
f115e911 7886 break;
2c0262af
FB
7887 case 0x102: /* lar */
7888 case 0x103: /* lsl */
cec6843e
FB
7889 {
7890 int label1;
1e4840bf 7891 TCGv t0;
cec6843e
FB
7892 if (!s->pe || s->vm86)
7893 goto illegal_op;
7894 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 7895 modrm = cpu_ldub_code(env, s->pc++);
cec6843e 7896 reg = ((modrm >> 3) & 7) | rex_r;
0af10c86 7897 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
a7812ae4 7898 t0 = tcg_temp_local_new();
773cdfcc 7899 gen_update_cc_op(s);
2999a0b2
BS
7900 if (b == 0x102) {
7901 gen_helper_lar(t0, cpu_env, cpu_T[0]);
7902 } else {
7903 gen_helper_lsl(t0, cpu_env, cpu_T[0]);
7904 }
cec6843e
FB
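            /* The helpers set CC_Z in cc_src only for a valid selector;
               the result is written back to the register only in that
               case. */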
7905 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7906 label1 = gen_new_label();
cb63669a 7907 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1e4840bf 7908 gen_op_mov_reg_v(ot, reg, t0);
cec6843e 7909 gen_set_label(label1);
3ca51d07 7910 set_cc_op(s, CC_OP_EFLAGS);
1e4840bf 7911 tcg_temp_free(t0);
cec6843e 7912 }
2c0262af
FB
7913 break;
7914 case 0x118:
0af10c86 7915 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7916 mod = (modrm >> 6) & 3;
7917 op = (modrm >> 3) & 7;
7918 switch(op) {
7919 case 0: /* prefetchnta */
7920 case 1: /* prefetcht0 */
7921 case 2: /* prefetcht1 */
7922 case 3: /* prefetcht2 */
7923 if (mod == 3)
7924 goto illegal_op;
0af10c86 7925 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
7926 /* nothing more to do */
7927 break;
e17a36ce 7928 default: /* nop (multi byte) */
0af10c86 7929 gen_nop_modrm(env, s, modrm);
e17a36ce 7930 break;
2c0262af
FB
7931 }
7932 break;
e17a36ce 7933 case 0x119 ... 0x11f: /* nop (multi byte) */
0af10c86
BS
7934 modrm = cpu_ldub_code(env, s->pc++);
7935 gen_nop_modrm(env, s, modrm);
e17a36ce 7936 break;
2c0262af
FB
7937 case 0x120: /* mov reg, crN */
7938 case 0x122: /* mov crN, reg */
7939 if (s->cpl != 0) {
7940 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7941 } else {
0af10c86 7942 modrm = cpu_ldub_code(env, s->pc++);
5c73b757
MO
7943 /* Ignore the mod bits (assume (modrm&0xc0)==0xc0).
7944 * AMD documentation (24594.pdf) and testing of
7945 * Intel 386 and 486 processors all show that the mod bits
7946 * are assumed to be 1's, regardless of actual values.
7947 */
14ce26e7
FB
7948 rm = (modrm & 7) | REX_B(s);
7949 reg = ((modrm >> 3) & 7) | rex_r;
7950 if (CODE64(s))
7951 ot = OT_QUAD;
7952 else
7953 ot = OT_LONG;
ccd59d09
AP
7954 if ((prefixes & PREFIX_LOCK) && (reg == 0) &&
7955 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) {
7956 reg = 8;
7957 }
2c0262af
FB
7958 switch(reg) {
7959 case 0:
7960 case 2:
7961 case 3:
7962 case 4:
9230e66e 7963 case 8:
773cdfcc 7964 gen_update_cc_op(s);
872929aa 7965 gen_jmp_im(pc_start - s->cs_base);
2c0262af 7966 if (b & 2) {
57fec1fe 7967 gen_op_mov_TN_reg(ot, 0, rm);
4a7443be
BS
7968 gen_helper_write_crN(cpu_env, tcg_const_i32(reg),
7969 cpu_T[0]);
14ce26e7 7970 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
7971 gen_eob(s);
7972 } else {
4a7443be 7973 gen_helper_read_crN(cpu_T[0], cpu_env, tcg_const_i32(reg));
57fec1fe 7974 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
7975 }
7976 break;
7977 default:
7978 goto illegal_op;
7979 }
7980 }
7981 break;
7982 case 0x121: /* mov reg, drN */
7983 case 0x123: /* mov drN, reg */
7984 if (s->cpl != 0) {
7985 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7986 } else {
0af10c86 7987 modrm = cpu_ldub_code(env, s->pc++);
5c73b757
MO
7988 /* Ignore the mod bits (assume (modrm&0xc0)==0xc0).
7989 * AMD documentation (24594.pdf) and testing of
7990 * Intel 386 and 486 processors all show that the mod bits
7991 * are assumed to be 1's, regardless of actual values.
7992 */
14ce26e7
FB
7993 rm = (modrm & 7) | REX_B(s);
7994 reg = ((modrm >> 3) & 7) | rex_r;
7995 if (CODE64(s))
7996 ot = OT_QUAD;
7997 else
7998 ot = OT_LONG;
2c0262af 7999 /* XXX: do it dynamically with CR4.DE bit */
14ce26e7 8000 if (reg == 4 || reg == 5 || reg >= 8)
2c0262af
FB
8001 goto illegal_op;
8002 if (b & 2) {
0573fbfc 8003 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
57fec1fe 8004 gen_op_mov_TN_reg(ot, 0, rm);
4a7443be 8005 gen_helper_movl_drN_T0(cpu_env, tcg_const_i32(reg), cpu_T[0]);
14ce26e7 8006 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
8007 gen_eob(s);
8008 } else {
0573fbfc 8009 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
651ba608 8010 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
57fec1fe 8011 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
8012 }
8013 }
8014 break;
8015 case 0x106: /* clts */
8016 if (s->cpl != 0) {
8017 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8018 } else {
0573fbfc 8019 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
f0967a1a 8020 gen_helper_clts(cpu_env);
7eee2a50 8021 /* abort block because static cpu state changed */
14ce26e7 8022 gen_jmp_im(s->pc - s->cs_base);
7eee2a50 8023 gen_eob(s);
2c0262af
FB
8024 }
8025 break;
222a3336 8026 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
664e0f19
FB
8027 case 0x1c3: /* MOVNTI reg, mem */
8028 if (!(s->cpuid_features & CPUID_SSE2))
14ce26e7 8029 goto illegal_op;
664e0f19 8030 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
0af10c86 8031 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
8032 mod = (modrm >> 6) & 3;
8033 if (mod == 3)
8034 goto illegal_op;
8035 reg = ((modrm >> 3) & 7) | rex_r;
8036 /* generate a generic store */
0af10c86 8037 gen_ldst_modrm(env, s, modrm, ot, reg, 1);
14ce26e7 8038 break;
664e0f19 8039 case 0x1ae:
0af10c86 8040 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
8041 mod = (modrm >> 6) & 3;
8042 op = (modrm >> 3) & 7;
8043 switch(op) {
8044 case 0: /* fxsave */
5fafdf24 8045 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
09d85fb8 8046 (s->prefix & PREFIX_LOCK))
14ce26e7 8047 goto illegal_op;
09d85fb8 8048 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
0fd14b72
FB
8049 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8050 break;
8051 }
0af10c86 8052 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
773cdfcc 8053 gen_update_cc_op(s);
19e6c4b8 8054 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 8055 gen_helper_fxsave(cpu_env, cpu_A0, tcg_const_i32((s->dflag == 2)));
664e0f19
FB
8056 break;
8057 case 1: /* fxrstor */
5fafdf24 8058 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
09d85fb8 8059 (s->prefix & PREFIX_LOCK))
14ce26e7 8060 goto illegal_op;
09d85fb8 8061 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
0fd14b72
FB
8062 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8063 break;
8064 }
0af10c86 8065 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
773cdfcc 8066 gen_update_cc_op(s);
19e6c4b8 8067 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
8068 gen_helper_fxrstor(cpu_env, cpu_A0,
8069 tcg_const_i32((s->dflag == 2)));
664e0f19
FB
8070 break;
8071 case 2: /* ldmxcsr */
8072 case 3: /* stmxcsr */
8073 if (s->flags & HF_TS_MASK) {
8074 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8075 break;
14ce26e7 8076 }
664e0f19
FB
8077 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8078 mod == 3)
14ce26e7 8079 goto illegal_op;
0af10c86 8080 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 8081 if (op == 2) {
57fec1fe 8082 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
20f8bd48 8083 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 8084 gen_helper_ldmxcsr(cpu_env, cpu_tmp2_i32);
14ce26e7 8085 } else {
651ba608 8086 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
57fec1fe 8087 gen_op_st_T0_A0(OT_LONG + s->mem_index);
14ce26e7 8088 }
664e0f19
FB
8089 break;
8090 case 5: /* lfence */
8091 case 6: /* mfence */
8001c294 8092 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE2))
664e0f19
FB
8093 goto illegal_op;
8094 break;
8f091a59
FB
8095 case 7: /* sfence / clflush */
8096 if ((modrm & 0xc7) == 0xc0) {
8097 /* sfence */
a35f3ec7 8098 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8f091a59
FB
8099 if (!(s->cpuid_features & CPUID_SSE))
8100 goto illegal_op;
8101 } else {
8102 /* clflush */
8103 if (!(s->cpuid_features & CPUID_CLFLUSH))
8104 goto illegal_op;
0af10c86 8105 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8f091a59
FB
8106 }
8107 break;
664e0f19 8108 default:
14ce26e7
FB
8109 goto illegal_op;
8110 }
8111 break;
a35f3ec7 8112 case 0x10d: /* 3DNow! prefetch(w) */
0af10c86 8113 modrm = cpu_ldub_code(env, s->pc++);
a35f3ec7
AJ
8114 mod = (modrm >> 6) & 3;
8115 if (mod == 3)
8116 goto illegal_op;
0af10c86 8117 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8f091a59
FB
8118 /* ignore for now */
8119 break;
3b21e03e 8120 case 0x1aa: /* rsm */
872929aa 8121 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
3b21e03e
FB
8122 if (!(s->flags & HF_SMM_MASK))
8123 goto illegal_op;
728d803b 8124 gen_update_cc_op(s);
3b21e03e 8125 gen_jmp_im(s->pc - s->cs_base);
608badfc 8126 gen_helper_rsm(cpu_env);
3b21e03e
FB
8127 gen_eob(s);
8128 break;
222a3336
AZ
8129 case 0x1b8: /* SSE4.2 popcnt */
8130 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8131 PREFIX_REPZ)
8132 goto illegal_op;
8133 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8134 goto illegal_op;
8135
0af10c86 8136 modrm = cpu_ldub_code(env, s->pc++);
8b4a3df8 8137 reg = ((modrm >> 3) & 7) | rex_r;
222a3336
AZ
8138
8139 if (s->prefix & PREFIX_DATA)
8140 ot = OT_WORD;
8141 else if (s->dflag != 2)
8142 ot = OT_LONG;
8143 else
8144 ot = OT_QUAD;
8145
0af10c86 8146 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
d3eb5eae 8147 gen_helper_popcnt(cpu_T[0], cpu_env, cpu_T[0], tcg_const_i32(ot));
222a3336 8148 gen_op_mov_reg_T0(ot, reg);
fdb0d09d 8149
3ca51d07 8150 set_cc_op(s, CC_OP_EFLAGS);
222a3336 8151 break;
a35f3ec7
AJ
8152 case 0x10e ... 0x10f:
8153 /* 3DNow! instructions, ignore prefixes */
8154 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
664e0f19
FB
8155 case 0x110 ... 0x117:
8156 case 0x128 ... 0x12f:
4242b1bd 8157 case 0x138 ... 0x13a:
d9f4bb27 8158 case 0x150 ... 0x179:
664e0f19
FB
8159 case 0x17c ... 0x17f:
8160 case 0x1c2:
8161 case 0x1c4 ... 0x1c6:
8162 case 0x1d0 ... 0x1fe:
0af10c86 8163 gen_sse(env, s, b, pc_start, rex_r);
664e0f19 8164 break;
2c0262af
FB
8165 default:
8166 goto illegal_op;
8167 }
8168 /* lock generation */
8169 if (s->prefix & PREFIX_LOCK)
a7812ae4 8170 gen_helper_unlock();
2c0262af
FB
8171 return s->pc;
8172 illegal_op:
ab1f142b 8173 if (s->prefix & PREFIX_LOCK)
a7812ae4 8174 gen_helper_unlock();
2c0262af
FB
8175 /* XXX: ensure that no lock was generated */
8176 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8177 return s->pc;
8178}
8179
2c0262af
FB
8180void optimize_flags_init(void)
8181{
a7812ae4
PB
8182 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8183 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8184 offsetof(CPUX86State, cc_op), "cc_op");
317ac620 8185 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_dst),
a7812ae4 8186 "cc_dst");
a3251186
RH
8187 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src),
8188 "cc_src");
988c3eb0
RH
8189 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src2),
8190 "cc_src2");
437a88a5 8191
cc739bb0
LD
8192#ifdef TARGET_X86_64
8193 cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8194 offsetof(CPUX86State, regs[R_EAX]), "rax");
cc739bb0 8195 cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8196 offsetof(CPUX86State, regs[R_ECX]), "rcx");
cc739bb0 8197 cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8198 offsetof(CPUX86State, regs[R_EDX]), "rdx");
cc739bb0 8199 cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8200 offsetof(CPUX86State, regs[R_EBX]), "rbx");
cc739bb0 8201 cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8202 offsetof(CPUX86State, regs[R_ESP]), "rsp");
cc739bb0 8203 cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8204 offsetof(CPUX86State, regs[R_EBP]), "rbp");
cc739bb0 8205 cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8206 offsetof(CPUX86State, regs[R_ESI]), "rsi");
cc739bb0 8207 cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8208 offsetof(CPUX86State, regs[R_EDI]), "rdi");
cc739bb0 8209 cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8210 offsetof(CPUX86State, regs[8]), "r8");
cc739bb0 8211 cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8212 offsetof(CPUX86State, regs[9]), "r9");
cc739bb0 8213 cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8214 offsetof(CPUX86State, regs[10]), "r10");
cc739bb0 8215 cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8216 offsetof(CPUX86State, regs[11]), "r11");
cc739bb0 8217 cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8218 offsetof(CPUX86State, regs[12]), "r12");
cc739bb0 8219 cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8220 offsetof(CPUX86State, regs[13]), "r13");
cc739bb0 8221 cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8222 offsetof(CPUX86State, regs[14]), "r14");
cc739bb0 8223 cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8224 offsetof(CPUX86State, regs[15]), "r15");
cc739bb0
LD
8225#else
8226 cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8227 offsetof(CPUX86State, regs[R_EAX]), "eax");
cc739bb0 8228 cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8229 offsetof(CPUX86State, regs[R_ECX]), "ecx");
cc739bb0 8230 cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8231 offsetof(CPUX86State, regs[R_EDX]), "edx");
cc739bb0 8232 cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8233 offsetof(CPUX86State, regs[R_EBX]), "ebx");
cc739bb0 8234 cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8235 offsetof(CPUX86State, regs[R_ESP]), "esp");
cc739bb0 8236 cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8237 offsetof(CPUX86State, regs[R_EBP]), "ebp");
cc739bb0 8238 cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8239 offsetof(CPUX86State, regs[R_ESI]), "esi");
cc739bb0 8240 cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8241 offsetof(CPUX86State, regs[R_EDI]), "edi");
cc739bb0
LD
8242#endif
8243
437a88a5 8244 /* register helpers */
a7812ae4 8245#define GEN_HELPER 2
437a88a5 8246#include "helper.h"
2c0262af
FB
8247}
8248
8249/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8250 basic block 'tb'. If search_pc is TRUE, also generate PC
8251 information for each intermediate instruction. */
317ac620 8252static inline void gen_intermediate_code_internal(CPUX86State *env,
2cfc5f17
TS
8253 TranslationBlock *tb,
8254 int search_pc)
2c0262af
FB
8255{
8256 DisasContext dc1, *dc = &dc1;
14ce26e7 8257 target_ulong pc_ptr;
2c0262af 8258 uint16_t *gen_opc_end;
a1d1bb31 8259 CPUBreakpoint *bp;
7f5b7d3e 8260 int j, lj;
c068688b 8261 uint64_t flags;
14ce26e7
FB
8262 target_ulong pc_start;
8263 target_ulong cs_base;
2e70f6ef
PB
8264 int num_insns;
8265 int max_insns;
3b46e624 8266
2c0262af 8267 /* generate intermediate code */
14ce26e7
FB
8268 pc_start = tb->pc;
8269 cs_base = tb->cs_base;
2c0262af 8270 flags = tb->flags;
3a1d9b8b 8271
4f31916f 8272 dc->pe = (flags >> HF_PE_SHIFT) & 1;
2c0262af
FB
8273 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8274 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8275 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8276 dc->f_st = 0;
8277 dc->vm86 = (flags >> VM_SHIFT) & 1;
8278 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8279 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8280 dc->tf = (flags >> TF_SHIFT) & 1;
34865134 8281 dc->singlestep_enabled = env->singlestep_enabled;
2c0262af 8282 dc->cc_op = CC_OP_DYNAMIC;
e207582f 8283 dc->cc_op_dirty = false;
2c0262af
FB
8284 dc->cs_base = cs_base;
8285 dc->tb = tb;
8286 dc->popl_esp_hack = 0;
8287 /* select memory access functions */
8288 dc->mem_index = 0;
8289 if (flags & HF_SOFTMMU_MASK) {
a9321a4d 8290 dc->mem_index = (cpu_mmu_index(env) + 1) << 2;
2c0262af 8291 }
14ce26e7 8292 dc->cpuid_features = env->cpuid_features;
3d7374c5 8293 dc->cpuid_ext_features = env->cpuid_ext_features;
e771edab 8294 dc->cpuid_ext2_features = env->cpuid_ext2_features;
12e26b75 8295 dc->cpuid_ext3_features = env->cpuid_ext3_features;
a9321a4d 8296 dc->cpuid_7_0_ebx_features = env->cpuid_7_0_ebx_features;
14ce26e7
FB
8297#ifdef TARGET_X86_64
8298 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8299 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8300#endif
7eee2a50 8301 dc->flags = flags;
a2cc3b24
FB
8302 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8303 (flags & HF_INHIBIT_IRQ_MASK)
415fa2ea 8304#ifndef CONFIG_SOFTMMU
2c0262af
FB
8305 || (flags & HF_SOFTMMU_MASK)
8306#endif
8307 );
4f31916f
FB
8308#if 0
8309 /* check addseg logic */
dc196a57 8310 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
4f31916f
FB
8311 printf("ERROR addseg\n");
8312#endif
8313
a7812ae4
PB
8314 cpu_T[0] = tcg_temp_new();
8315 cpu_T[1] = tcg_temp_new();
8316 cpu_A0 = tcg_temp_new();
a7812ae4
PB
8317
8318 cpu_tmp0 = tcg_temp_new();
8319 cpu_tmp1_i64 = tcg_temp_new_i64();
8320 cpu_tmp2_i32 = tcg_temp_new_i32();
8321 cpu_tmp3_i32 = tcg_temp_new_i32();
8322 cpu_tmp4 = tcg_temp_new();
a7812ae4
PB
8323 cpu_ptr0 = tcg_temp_new_ptr();
8324 cpu_ptr1 = tcg_temp_new_ptr();
a3251186 8325 cpu_cc_srcT = tcg_temp_local_new();
57fec1fe 8326
92414b31 8327 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
2c0262af
FB
8328
8329 dc->is_jmp = DISAS_NEXT;
8330 pc_ptr = pc_start;
8331 lj = -1;
2e70f6ef
PB
8332 num_insns = 0;
8333 max_insns = tb->cflags & CF_COUNT_MASK;
8334 if (max_insns == 0)
8335 max_insns = CF_COUNT_MASK;
2c0262af 8336
806f352d 8337 gen_tb_start();
2c0262af 8338 for(;;) {
72cf2d4f
BS
8339 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
8340 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
a2397807
JK
8341 if (bp->pc == pc_ptr &&
8342 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
2c0262af
FB
8343 gen_debug(dc, pc_ptr - dc->cs_base);
8344 break;
8345 }
8346 }
8347 }
8348 if (search_pc) {
92414b31 8349 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
2c0262af
FB
8350 if (lj < j) {
8351 lj++;
8352 while (lj < j)
ab1103de 8353 tcg_ctx.gen_opc_instr_start[lj++] = 0;
2c0262af 8354 }
25983cad 8355 tcg_ctx.gen_opc_pc[lj] = pc_ptr;
2c0262af 8356 gen_opc_cc_op[lj] = dc->cc_op;
ab1103de 8357 tcg_ctx.gen_opc_instr_start[lj] = 1;
c9c99c22 8358 tcg_ctx.gen_opc_icount[lj] = num_insns;
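            /* These per-op arrays let restore_state_to_opc() below map an
               op index back to the guest EIP and cc_op after a fault or
               exception. */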
2c0262af 8359 }
2e70f6ef
PB
8360 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8361 gen_io_start();
8362
0af10c86 8363 pc_ptr = disas_insn(env, dc, pc_ptr);
2e70f6ef 8364 num_insns++;
2c0262af
FB
8365 /* stop translation if indicated */
8366 if (dc->is_jmp)
8367 break;
8368 /* if single step mode, we generate only one instruction and
8369 generate an exception */
a2cc3b24
FB
8370 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8371 the flag and abort the translation to give the irqs a
8372 chance to happen */
5fafdf24 8373 if (dc->tf || dc->singlestep_enabled ||
2e70f6ef 8374 (flags & HF_INHIBIT_IRQ_MASK)) {
14ce26e7 8375 gen_jmp_im(pc_ptr - dc->cs_base);
2c0262af
FB
8376 gen_eob(dc);
8377 break;
8378 }
8379 /* if the translation is getting too long, stop generation as well */
efd7f486 8380 if (tcg_ctx.gen_opc_ptr >= gen_opc_end ||
2e70f6ef
PB
8381 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8382 num_insns >= max_insns) {
14ce26e7 8383 gen_jmp_im(pc_ptr - dc->cs_base);
2c0262af
FB
8384 gen_eob(dc);
8385 break;
8386 }
1b530a6d
AJ
8387 if (singlestep) {
8388 gen_jmp_im(pc_ptr - dc->cs_base);
8389 gen_eob(dc);
8390 break;
8391 }
2c0262af 8392 }
2e70f6ef
PB
8393 if (tb->cflags & CF_LAST_IO)
8394 gen_io_end();
806f352d 8395 gen_tb_end(tb, num_insns);
efd7f486 8396 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
2c0262af
FB
8397 /* don't forget to fill in the remaining values */
8398 if (search_pc) {
92414b31 8399 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
2c0262af
FB
8400 lj++;
8401 while (lj <= j)
ab1103de 8402 tcg_ctx.gen_opc_instr_start[lj++] = 0;
2c0262af 8403 }
3b46e624 8404
2c0262af 8405#ifdef DEBUG_DISAS
8fec2b8c 8406 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
14ce26e7 8407 int disas_flags;
93fcfe39
AL
8408 qemu_log("----------------\n");
8409 qemu_log("IN: %s\n", lookup_symbol(pc_start));
14ce26e7
FB
8410#ifdef TARGET_X86_64
8411 if (dc->code64)
8412 disas_flags = 2;
8413 else
8414#endif
8415 disas_flags = !dc->code32;
f4359b9f 8416 log_target_disas(env, pc_start, pc_ptr - pc_start, disas_flags);
93fcfe39 8417 qemu_log("\n");
2c0262af
FB
8418 }
8419#endif
8420
2e70f6ef 8421 if (!search_pc) {
2c0262af 8422 tb->size = pc_ptr - pc_start;
2e70f6ef
PB
8423 tb->icount = num_insns;
8424 }
2c0262af
FB
8425}
8426
317ac620 8427void gen_intermediate_code(CPUX86State *env, TranslationBlock *tb)
2c0262af 8428{
2cfc5f17 8429 gen_intermediate_code_internal(env, tb, 0);
2c0262af
FB
8430}
8431
317ac620 8432void gen_intermediate_code_pc(CPUX86State *env, TranslationBlock *tb)
2c0262af 8433{
2cfc5f17 8434 gen_intermediate_code_internal(env, tb, 1);
2c0262af
FB
8435}
8436
317ac620 8437void restore_state_to_opc(CPUX86State *env, TranslationBlock *tb, int pc_pos)
d2856f1a
AJ
8438{
8439 int cc_op;
8440#ifdef DEBUG_DISAS
8fec2b8c 8441 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
d2856f1a 8442 int i;
93fcfe39 8443 qemu_log("RESTORE:\n");
d2856f1a 8444 for(i = 0;i <= pc_pos; i++) {
ab1103de 8445 if (tcg_ctx.gen_opc_instr_start[i]) {
25983cad
EV
8446 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i,
8447 tcg_ctx.gen_opc_pc[i]);
d2856f1a
AJ
8448 }
8449 }
e87b7cb0 8450 qemu_log("pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
25983cad 8451 pc_pos, tcg_ctx.gen_opc_pc[pc_pos] - tb->cs_base,
d2856f1a
AJ
8452 (uint32_t)tb->cs_base);
8453 }
8454#endif
25983cad 8455 env->eip = tcg_ctx.gen_opc_pc[pc_pos] - tb->cs_base;
d2856f1a
AJ
8456 cc_op = gen_opc_cc_op[pc_pos];
8457 if (cc_op != CC_OP_DYNAMIC)
8458 env->cc_op = cc_op;
8459}