]> git.proxmox.com Git - qemu.git/blame - target-i386/translate.c
target-i386: Fix addr32 prefix in gen_lea_modrm
[qemu.git] / target-i386 / translate.c
CommitLineData
2c0262af
FB
1/*
2 * i386 translation
5fafdf24 3 *
2c0262af
FB
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
8167ee88 17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
2c0262af
FB
18 */
19#include <stdarg.h>
20#include <stdlib.h>
21#include <stdio.h>
22#include <string.h>
23#include <inttypes.h>
24#include <signal.h>
2c0262af 25
bec93d72 26#include "qemu/host-utils.h"
2c0262af 27#include "cpu.h"
76cad711 28#include "disas/disas.h"
57fec1fe 29#include "tcg-op.h"
2c0262af 30
a7812ae4
PB
31#include "helper.h"
32#define GEN_HELPER 1
33#include "helper.h"
34
2c0262af
FB
35#define PREFIX_REPZ 0x01
36#define PREFIX_REPNZ 0x02
37#define PREFIX_LOCK 0x04
38#define PREFIX_DATA 0x08
39#define PREFIX_ADR 0x10
701ed211 40#define PREFIX_VEX 0x20
2c0262af 41
14ce26e7 42#ifdef TARGET_X86_64
14ce26e7
FB
43#define CODE64(s) ((s)->code64)
44#define REX_X(s) ((s)->rex_x)
45#define REX_B(s) ((s)->rex_b)
14ce26e7 46#else
14ce26e7
FB
47#define CODE64(s) 0
48#define REX_X(s) 0
49#define REX_B(s) 0
50#endif
51
bec93d72
RH
52#ifdef TARGET_X86_64
53# define ctztl ctz64
54# define clztl clz64
55#else
56# define ctztl ctz32
57# define clztl clz32
58#endif
59
57fec1fe
FB
60//#define MACRO_TEST 1
61
57fec1fe 62/* global register indexes */
a7812ae4 63static TCGv_ptr cpu_env;
a3251186 64static TCGv cpu_A0;
988c3eb0 65static TCGv cpu_cc_dst, cpu_cc_src, cpu_cc_src2, cpu_cc_srcT;
a7812ae4 66static TCGv_i32 cpu_cc_op;
cc739bb0 67static TCGv cpu_regs[CPU_NB_REGS];
1e4840bf 68/* local temps */
3b9d3cf1 69static TCGv cpu_T[2];
57fec1fe 70/* local register indexes (only used inside old micro ops) */
a7812ae4
PB
71static TCGv cpu_tmp0, cpu_tmp4;
72static TCGv_ptr cpu_ptr0, cpu_ptr1;
73static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
74static TCGv_i64 cpu_tmp1_i64;
57fec1fe 75
1a7ff922
PB
76static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
77
022c62cb 78#include "exec/gen-icount.h"
2e70f6ef 79
57fec1fe
FB
80#ifdef TARGET_X86_64
81static int x86_64_hregs;
ae063a68
FB
82#endif
83
2c0262af
FB
84typedef struct DisasContext {
85 /* current insn context */
86 int override; /* -1 if no override */
87 int prefix;
88 int aflag, dflag;
14ce26e7 89 target_ulong pc; /* pc = eip + cs_base */
2c0262af
FB
90 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
91 static state change (stop translation) */
92 /* current block context */
14ce26e7 93 target_ulong cs_base; /* base of CS segment */
2c0262af
FB
94 int pe; /* protected mode */
95 int code32; /* 32 bit code segment */
14ce26e7
FB
96#ifdef TARGET_X86_64
97 int lma; /* long mode active */
98 int code64; /* 64 bit code segment */
99 int rex_x, rex_b;
100#endif
701ed211
RH
101 int vex_l; /* vex vector length */
102 int vex_v; /* vex vvvv register, without 1's compliment. */
2c0262af 103 int ss32; /* 32 bit stack segment */
fee71888 104 CCOp cc_op; /* current CC operation */
e207582f 105 bool cc_op_dirty;
2c0262af
FB
106 int addseg; /* non zero if either DS/ES/SS have a non zero base */
107 int f_st; /* currently unused */
108 int vm86; /* vm86 mode */
109 int cpl;
110 int iopl;
111 int tf; /* TF cpu flag */
34865134 112 int singlestep_enabled; /* "hardware" single step enabled */
2c0262af
FB
113 int jmp_opt; /* use direct block chaining for direct jumps */
114 int mem_index; /* select memory access functions */
c068688b 115 uint64_t flags; /* all execution flags */
2c0262af
FB
116 struct TranslationBlock *tb;
117 int popl_esp_hack; /* for correct popl with esp base handling */
14ce26e7
FB
118 int rip_offset; /* only used in x86_64, but left for simplicity */
119 int cpuid_features;
3d7374c5 120 int cpuid_ext_features;
e771edab 121 int cpuid_ext2_features;
12e26b75 122 int cpuid_ext3_features;
a9321a4d 123 int cpuid_7_0_ebx_features;
2c0262af
FB
124} DisasContext;
125
126static void gen_eob(DisasContext *s);
14ce26e7
FB
127static void gen_jmp(DisasContext *s, target_ulong eip);
128static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
63633fe6 129static void gen_op(DisasContext *s1, int op, int ot, int d);
2c0262af
FB
130
131/* i386 arith/logic operations */
132enum {
5fafdf24
TS
133 OP_ADDL,
134 OP_ORL,
135 OP_ADCL,
2c0262af 136 OP_SBBL,
5fafdf24
TS
137 OP_ANDL,
138 OP_SUBL,
139 OP_XORL,
2c0262af
FB
140 OP_CMPL,
141};
142
143/* i386 shift ops */
144enum {
5fafdf24
TS
145 OP_ROL,
146 OP_ROR,
147 OP_RCL,
148 OP_RCR,
149 OP_SHL,
150 OP_SHR,
2c0262af
FB
151 OP_SHL1, /* undocumented */
152 OP_SAR = 7,
153};
154
8e1c85e3
FB
155enum {
156 JCC_O,
157 JCC_B,
158 JCC_Z,
159 JCC_BE,
160 JCC_S,
161 JCC_P,
162 JCC_L,
163 JCC_LE,
164};
165
2c0262af
FB
166/* operand size */
167enum {
168 OT_BYTE = 0,
169 OT_WORD,
5fafdf24 170 OT_LONG,
2c0262af
FB
171 OT_QUAD,
172};
173
174enum {
175 /* I386 int registers */
176 OR_EAX, /* MUST be even numbered */
177 OR_ECX,
178 OR_EDX,
179 OR_EBX,
180 OR_ESP,
181 OR_EBP,
182 OR_ESI,
183 OR_EDI,
14ce26e7
FB
184
185 OR_TMP0 = 16, /* temporary operand register */
2c0262af
FB
186 OR_TMP1,
187 OR_A0, /* temporary register used when doing address evaluation */
2c0262af
FB
188};
189
b666265b 190enum {
a3251186
RH
191 USES_CC_DST = 1,
192 USES_CC_SRC = 2,
988c3eb0
RH
193 USES_CC_SRC2 = 4,
194 USES_CC_SRCT = 8,
b666265b
RH
195};
196
197/* Bit set if the global variable is live after setting CC_OP to X. */
198static const uint8_t cc_op_live[CC_OP_NB] = {
988c3eb0 199 [CC_OP_DYNAMIC] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
b666265b
RH
200 [CC_OP_EFLAGS] = USES_CC_SRC,
201 [CC_OP_MULB ... CC_OP_MULQ] = USES_CC_DST | USES_CC_SRC,
202 [CC_OP_ADDB ... CC_OP_ADDQ] = USES_CC_DST | USES_CC_SRC,
988c3eb0 203 [CC_OP_ADCB ... CC_OP_ADCQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
a3251186 204 [CC_OP_SUBB ... CC_OP_SUBQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRCT,
988c3eb0 205 [CC_OP_SBBB ... CC_OP_SBBQ] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
b666265b
RH
206 [CC_OP_LOGICB ... CC_OP_LOGICQ] = USES_CC_DST,
207 [CC_OP_INCB ... CC_OP_INCQ] = USES_CC_DST | USES_CC_SRC,
208 [CC_OP_DECB ... CC_OP_DECQ] = USES_CC_DST | USES_CC_SRC,
209 [CC_OP_SHLB ... CC_OP_SHLQ] = USES_CC_DST | USES_CC_SRC,
210 [CC_OP_SARB ... CC_OP_SARQ] = USES_CC_DST | USES_CC_SRC,
bc4b43dc 211 [CC_OP_BMILGB ... CC_OP_BMILGQ] = USES_CC_DST | USES_CC_SRC,
cd7f97ca
RH
212 [CC_OP_ADCX] = USES_CC_DST | USES_CC_SRC,
213 [CC_OP_ADOX] = USES_CC_SRC | USES_CC_SRC2,
214 [CC_OP_ADCOX] = USES_CC_DST | USES_CC_SRC | USES_CC_SRC2,
436ff2d2 215 [CC_OP_CLR] = 0,
b666265b
RH
216};
217
e207582f 218static void set_cc_op(DisasContext *s, CCOp op)
3ca51d07 219{
b666265b
RH
220 int dead;
221
222 if (s->cc_op == op) {
223 return;
224 }
225
226 /* Discard CC computation that will no longer be used. */
227 dead = cc_op_live[s->cc_op] & ~cc_op_live[op];
228 if (dead & USES_CC_DST) {
229 tcg_gen_discard_tl(cpu_cc_dst);
e207582f 230 }
b666265b
RH
231 if (dead & USES_CC_SRC) {
232 tcg_gen_discard_tl(cpu_cc_src);
233 }
988c3eb0
RH
234 if (dead & USES_CC_SRC2) {
235 tcg_gen_discard_tl(cpu_cc_src2);
236 }
a3251186
RH
237 if (dead & USES_CC_SRCT) {
238 tcg_gen_discard_tl(cpu_cc_srcT);
239 }
b666265b 240
e2f515cf
RH
241 if (op == CC_OP_DYNAMIC) {
242 /* The DYNAMIC setting is translator only, and should never be
243 stored. Thus we always consider it clean. */
244 s->cc_op_dirty = false;
245 } else {
246 /* Discard any computed CC_OP value (see shifts). */
247 if (s->cc_op == CC_OP_DYNAMIC) {
248 tcg_gen_discard_i32(cpu_cc_op);
249 }
250 s->cc_op_dirty = true;
251 }
b666265b 252 s->cc_op = op;
e207582f
RH
253}
254
e207582f
RH
255static void gen_update_cc_op(DisasContext *s)
256{
257 if (s->cc_op_dirty) {
773cdfcc 258 tcg_gen_movi_i32(cpu_cc_op, s->cc_op);
e207582f
RH
259 s->cc_op_dirty = false;
260 }
3ca51d07
RH
261}
262
57fec1fe
FB
263static inline void gen_op_movl_T0_0(void)
264{
265 tcg_gen_movi_tl(cpu_T[0], 0);
266}
267
268static inline void gen_op_movl_T0_im(int32_t val)
269{
270 tcg_gen_movi_tl(cpu_T[0], val);
271}
272
273static inline void gen_op_movl_T0_imu(uint32_t val)
274{
275 tcg_gen_movi_tl(cpu_T[0], val);
276}
277
278static inline void gen_op_movl_T1_im(int32_t val)
279{
280 tcg_gen_movi_tl(cpu_T[1], val);
281}
282
283static inline void gen_op_movl_T1_imu(uint32_t val)
284{
285 tcg_gen_movi_tl(cpu_T[1], val);
286}
287
288static inline void gen_op_movl_A0_im(uint32_t val)
289{
290 tcg_gen_movi_tl(cpu_A0, val);
291}
292
293#ifdef TARGET_X86_64
294static inline void gen_op_movq_A0_im(int64_t val)
295{
296 tcg_gen_movi_tl(cpu_A0, val);
297}
298#endif
299
300static inline void gen_movtl_T0_im(target_ulong val)
301{
302 tcg_gen_movi_tl(cpu_T[0], val);
303}
304
305static inline void gen_movtl_T1_im(target_ulong val)
306{
307 tcg_gen_movi_tl(cpu_T[1], val);
308}
309
310static inline void gen_op_andl_T0_ffff(void)
311{
312 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
313}
314
315static inline void gen_op_andl_T0_im(uint32_t val)
316{
317 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
318}
319
320static inline void gen_op_movl_T0_T1(void)
321{
322 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
323}
324
325static inline void gen_op_andl_A0_ffff(void)
326{
327 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
328}
329
14ce26e7
FB
330#ifdef TARGET_X86_64
331
332#define NB_OP_SIZES 4
333
14ce26e7
FB
334#else /* !TARGET_X86_64 */
335
336#define NB_OP_SIZES 3
337
14ce26e7
FB
338#endif /* !TARGET_X86_64 */
339
e2542fe2 340#if defined(HOST_WORDS_BIGENDIAN)
57fec1fe
FB
341#define REG_B_OFFSET (sizeof(target_ulong) - 1)
342#define REG_H_OFFSET (sizeof(target_ulong) - 2)
343#define REG_W_OFFSET (sizeof(target_ulong) - 2)
344#define REG_L_OFFSET (sizeof(target_ulong) - 4)
345#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
14ce26e7 346#else
57fec1fe
FB
347#define REG_B_OFFSET 0
348#define REG_H_OFFSET 1
349#define REG_W_OFFSET 0
350#define REG_L_OFFSET 0
351#define REG_LH_OFFSET 4
14ce26e7 352#endif
57fec1fe 353
96d7073f
PM
354/* In instruction encodings for byte register accesses the
355 * register number usually indicates "low 8 bits of register N";
356 * however there are some special cases where N 4..7 indicates
357 * [AH, CH, DH, BH], ie "bits 15..8 of register N-4". Return
358 * true for this special case, false otherwise.
359 */
360static inline bool byte_reg_is_xH(int reg)
361{
362 if (reg < 4) {
363 return false;
364 }
365#ifdef TARGET_X86_64
366 if (reg >= 8 || x86_64_hregs) {
367 return false;
368 }
369#endif
370 return true;
371}
372
1e4840bf 373static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
57fec1fe
FB
374{
375 switch(ot) {
376 case OT_BYTE:
96d7073f 377 if (!byte_reg_is_xH(reg)) {
c832e3de 378 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 8);
57fec1fe 379 } else {
c832e3de 380 tcg_gen_deposit_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], t0, 8, 8);
57fec1fe
FB
381 }
382 break;
383 case OT_WORD:
c832e3de 384 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 16);
57fec1fe 385 break;
cc739bb0 386 default: /* XXX this shouldn't be reached; abort? */
57fec1fe 387 case OT_LONG:
cc739bb0
LD
388 /* For x86_64, this sets the higher half of register to zero.
389 For i386, this is equivalent to a mov. */
390 tcg_gen_ext32u_tl(cpu_regs[reg], t0);
57fec1fe 391 break;
cc739bb0 392#ifdef TARGET_X86_64
57fec1fe 393 case OT_QUAD:
cc739bb0 394 tcg_gen_mov_tl(cpu_regs[reg], t0);
57fec1fe 395 break;
14ce26e7 396#endif
57fec1fe
FB
397 }
398}
2c0262af 399
57fec1fe
FB
400static inline void gen_op_mov_reg_T0(int ot, int reg)
401{
1e4840bf 402 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
57fec1fe
FB
403}
404
405static inline void gen_op_mov_reg_T1(int ot, int reg)
406{
1e4840bf 407 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
57fec1fe
FB
408}
409
410static inline void gen_op_mov_reg_A0(int size, int reg)
411{
412 switch(size) {
93ab25d7 413 case OT_BYTE:
c832e3de 414 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_A0, 0, 16);
57fec1fe 415 break;
cc739bb0 416 default: /* XXX this shouldn't be reached; abort? */
93ab25d7 417 case OT_WORD:
cc739bb0
LD
418 /* For x86_64, this sets the higher half of register to zero.
419 For i386, this is equivalent to a mov. */
420 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
57fec1fe 421 break;
cc739bb0 422#ifdef TARGET_X86_64
93ab25d7 423 case OT_LONG:
cc739bb0 424 tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
57fec1fe 425 break;
14ce26e7 426#endif
57fec1fe
FB
427 }
428}
429
1e4840bf 430static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
57fec1fe 431{
96d7073f
PM
432 if (ot == OT_BYTE && byte_reg_is_xH(reg)) {
433 tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
434 tcg_gen_ext8u_tl(t0, t0);
435 } else {
cc739bb0 436 tcg_gen_mov_tl(t0, cpu_regs[reg]);
57fec1fe
FB
437 }
438}
439
1e4840bf
FB
440static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
441{
442 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
443}
444
57fec1fe
FB
445static inline void gen_op_movl_A0_reg(int reg)
446{
cc739bb0 447 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
57fec1fe
FB
448}
449
450static inline void gen_op_addl_A0_im(int32_t val)
451{
452 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
14ce26e7 453#ifdef TARGET_X86_64
57fec1fe 454 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
14ce26e7 455#endif
57fec1fe 456}
2c0262af 457
14ce26e7 458#ifdef TARGET_X86_64
57fec1fe
FB
459static inline void gen_op_addq_A0_im(int64_t val)
460{
461 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
462}
14ce26e7 463#endif
57fec1fe
FB
464
465static void gen_add_A0_im(DisasContext *s, int val)
466{
467#ifdef TARGET_X86_64
468 if (CODE64(s))
469 gen_op_addq_A0_im(val);
470 else
471#endif
472 gen_op_addl_A0_im(val);
473}
2c0262af 474
57fec1fe 475static inline void gen_op_addl_T0_T1(void)
2c0262af 476{
57fec1fe
FB
477 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
478}
479
480static inline void gen_op_jmp_T0(void)
481{
317ac620 482 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, eip));
57fec1fe
FB
483}
484
6e0d8677 485static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
57fec1fe 486{
6e0d8677 487 switch(size) {
93ab25d7 488 case OT_BYTE:
cc739bb0 489 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
c832e3de 490 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
6e0d8677 491 break;
93ab25d7 492 case OT_WORD:
cc739bb0
LD
493 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
494 /* For x86_64, this sets the higher half of register to zero.
495 For i386, this is equivalent to a nop. */
496 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
497 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
6e0d8677
FB
498 break;
499#ifdef TARGET_X86_64
93ab25d7 500 case OT_LONG:
cc739bb0 501 tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
6e0d8677
FB
502 break;
503#endif
504 }
57fec1fe
FB
505}
506
6e0d8677 507static inline void gen_op_add_reg_T0(int size, int reg)
57fec1fe 508{
6e0d8677 509 switch(size) {
93ab25d7 510 case OT_BYTE:
cc739bb0 511 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
c832e3de 512 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
6e0d8677 513 break;
93ab25d7 514 case OT_WORD:
cc739bb0
LD
515 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
516 /* For x86_64, this sets the higher half of register to zero.
517 For i386, this is equivalent to a nop. */
518 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
519 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
6e0d8677 520 break;
14ce26e7 521#ifdef TARGET_X86_64
93ab25d7 522 case OT_LONG:
cc739bb0 523 tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
6e0d8677 524 break;
14ce26e7 525#endif
6e0d8677
FB
526 }
527}
57fec1fe 528
57fec1fe
FB
529static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
530{
cc739bb0
LD
531 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
532 if (shift != 0)
57fec1fe
FB
533 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
534 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
cc739bb0
LD
535 /* For x86_64, this sets the higher half of register to zero.
536 For i386, this is equivalent to a nop. */
537 tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
57fec1fe 538}
2c0262af 539
57fec1fe
FB
540static inline void gen_op_movl_A0_seg(int reg)
541{
317ac620 542 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base) + REG_L_OFFSET);
57fec1fe 543}
2c0262af 544
7162ab21 545static inline void gen_op_addl_A0_seg(DisasContext *s, int reg)
57fec1fe 546{
317ac620 547 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
57fec1fe 548#ifdef TARGET_X86_64
7162ab21
VC
549 if (CODE64(s)) {
550 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
551 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
552 } else {
553 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
554 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
555 }
556#else
557 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
57fec1fe
FB
558#endif
559}
2c0262af 560
14ce26e7 561#ifdef TARGET_X86_64
57fec1fe
FB
562static inline void gen_op_movq_A0_seg(int reg)
563{
317ac620 564 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base));
57fec1fe 565}
14ce26e7 566
57fec1fe
FB
567static inline void gen_op_addq_A0_seg(int reg)
568{
317ac620 569 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
57fec1fe
FB
570 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
571}
572
573static inline void gen_op_movq_A0_reg(int reg)
574{
cc739bb0 575 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
57fec1fe
FB
576}
577
578static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
579{
cc739bb0
LD
580 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
581 if (shift != 0)
57fec1fe
FB
582 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
583 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
584}
14ce26e7
FB
585#endif
586
57fec1fe
FB
587static inline void gen_op_lds_T0_A0(int idx)
588{
589 int mem_index = (idx >> 2) - 1;
590 switch(idx & 3) {
93ab25d7 591 case OT_BYTE:
57fec1fe
FB
592 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
593 break;
93ab25d7 594 case OT_WORD:
57fec1fe
FB
595 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
596 break;
597 default:
93ab25d7 598 case OT_LONG:
57fec1fe
FB
599 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
600 break;
601 }
602}
2c0262af 603
1e4840bf 604static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
57fec1fe
FB
605{
606 int mem_index = (idx >> 2) - 1;
607 switch(idx & 3) {
93ab25d7 608 case OT_BYTE:
1e4840bf 609 tcg_gen_qemu_ld8u(t0, a0, mem_index);
57fec1fe 610 break;
93ab25d7 611 case OT_WORD:
1e4840bf 612 tcg_gen_qemu_ld16u(t0, a0, mem_index);
57fec1fe 613 break;
93ab25d7 614 case OT_LONG:
1e4840bf 615 tcg_gen_qemu_ld32u(t0, a0, mem_index);
57fec1fe
FB
616 break;
617 default:
93ab25d7 618 case OT_QUAD:
a7812ae4
PB
619 /* Should never happen on 32-bit targets. */
620#ifdef TARGET_X86_64
1e4840bf 621 tcg_gen_qemu_ld64(t0, a0, mem_index);
a7812ae4 622#endif
57fec1fe
FB
623 break;
624 }
625}
2c0262af 626
1e4840bf
FB
627/* XXX: always use ldu or lds */
628static inline void gen_op_ld_T0_A0(int idx)
629{
630 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
631}
632
57fec1fe
FB
633static inline void gen_op_ldu_T0_A0(int idx)
634{
1e4840bf 635 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
57fec1fe 636}
2c0262af 637
57fec1fe 638static inline void gen_op_ld_T1_A0(int idx)
1e4840bf
FB
639{
640 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
641}
642
643static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
57fec1fe
FB
644{
645 int mem_index = (idx >> 2) - 1;
646 switch(idx & 3) {
93ab25d7 647 case OT_BYTE:
1e4840bf 648 tcg_gen_qemu_st8(t0, a0, mem_index);
57fec1fe 649 break;
93ab25d7 650 case OT_WORD:
1e4840bf 651 tcg_gen_qemu_st16(t0, a0, mem_index);
57fec1fe 652 break;
93ab25d7 653 case OT_LONG:
1e4840bf 654 tcg_gen_qemu_st32(t0, a0, mem_index);
57fec1fe
FB
655 break;
656 default:
93ab25d7 657 case OT_QUAD:
a7812ae4
PB
658 /* Should never happen on 32-bit targets. */
659#ifdef TARGET_X86_64
1e4840bf 660 tcg_gen_qemu_st64(t0, a0, mem_index);
a7812ae4 661#endif
57fec1fe
FB
662 break;
663 }
664}
4f31916f 665
57fec1fe
FB
666static inline void gen_op_st_T0_A0(int idx)
667{
1e4840bf 668 gen_op_st_v(idx, cpu_T[0], cpu_A0);
57fec1fe 669}
4f31916f 670
57fec1fe
FB
671static inline void gen_op_st_T1_A0(int idx)
672{
1e4840bf 673 gen_op_st_v(idx, cpu_T[1], cpu_A0);
57fec1fe 674}
4f31916f 675
14ce26e7
FB
676static inline void gen_jmp_im(target_ulong pc)
677{
57fec1fe 678 tcg_gen_movi_tl(cpu_tmp0, pc);
317ac620 679 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, eip));
14ce26e7
FB
680}
681
2c0262af
FB
682static inline void gen_string_movl_A0_ESI(DisasContext *s)
683{
684 int override;
685
686 override = s->override;
14ce26e7
FB
687#ifdef TARGET_X86_64
688 if (s->aflag == 2) {
689 if (override >= 0) {
57fec1fe
FB
690 gen_op_movq_A0_seg(override);
691 gen_op_addq_A0_reg_sN(0, R_ESI);
14ce26e7 692 } else {
57fec1fe 693 gen_op_movq_A0_reg(R_ESI);
14ce26e7
FB
694 }
695 } else
696#endif
2c0262af
FB
697 if (s->aflag) {
698 /* 32 bit address */
699 if (s->addseg && override < 0)
700 override = R_DS;
701 if (override >= 0) {
57fec1fe
FB
702 gen_op_movl_A0_seg(override);
703 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af 704 } else {
57fec1fe 705 gen_op_movl_A0_reg(R_ESI);
2c0262af
FB
706 }
707 } else {
708 /* 16 address, always override */
709 if (override < 0)
710 override = R_DS;
57fec1fe 711 gen_op_movl_A0_reg(R_ESI);
2c0262af 712 gen_op_andl_A0_ffff();
7162ab21 713 gen_op_addl_A0_seg(s, override);
2c0262af
FB
714 }
715}
716
717static inline void gen_string_movl_A0_EDI(DisasContext *s)
718{
14ce26e7
FB
719#ifdef TARGET_X86_64
720 if (s->aflag == 2) {
57fec1fe 721 gen_op_movq_A0_reg(R_EDI);
14ce26e7
FB
722 } else
723#endif
2c0262af
FB
724 if (s->aflag) {
725 if (s->addseg) {
57fec1fe
FB
726 gen_op_movl_A0_seg(R_ES);
727 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af 728 } else {
57fec1fe 729 gen_op_movl_A0_reg(R_EDI);
2c0262af
FB
730 }
731 } else {
57fec1fe 732 gen_op_movl_A0_reg(R_EDI);
2c0262af 733 gen_op_andl_A0_ffff();
7162ab21 734 gen_op_addl_A0_seg(s, R_ES);
2c0262af
FB
735 }
736}
737
6e0d8677
FB
738static inline void gen_op_movl_T0_Dshift(int ot)
739{
317ac620 740 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, df));
6e0d8677 741 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
2c0262af
FB
742};
743
d824df34 744static TCGv gen_ext_tl(TCGv dst, TCGv src, int size, bool sign)
6e0d8677 745{
d824df34 746 switch (size) {
6e0d8677 747 case OT_BYTE:
d824df34
PB
748 if (sign) {
749 tcg_gen_ext8s_tl(dst, src);
750 } else {
751 tcg_gen_ext8u_tl(dst, src);
752 }
753 return dst;
6e0d8677 754 case OT_WORD:
d824df34
PB
755 if (sign) {
756 tcg_gen_ext16s_tl(dst, src);
757 } else {
758 tcg_gen_ext16u_tl(dst, src);
759 }
760 return dst;
761#ifdef TARGET_X86_64
6e0d8677 762 case OT_LONG:
d824df34
PB
763 if (sign) {
764 tcg_gen_ext32s_tl(dst, src);
765 } else {
766 tcg_gen_ext32u_tl(dst, src);
767 }
768 return dst;
769#endif
6e0d8677 770 default:
d824df34 771 return src;
6e0d8677
FB
772 }
773}
3b46e624 774
d824df34
PB
775static void gen_extu(int ot, TCGv reg)
776{
777 gen_ext_tl(reg, reg, ot, false);
778}
779
6e0d8677
FB
780static void gen_exts(int ot, TCGv reg)
781{
d824df34 782 gen_ext_tl(reg, reg, ot, true);
6e0d8677 783}
2c0262af 784
6e0d8677
FB
785static inline void gen_op_jnz_ecx(int size, int label1)
786{
cc739bb0 787 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
6e0d8677 788 gen_extu(size + 1, cpu_tmp0);
cb63669a 789 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
6e0d8677
FB
790}
791
792static inline void gen_op_jz_ecx(int size, int label1)
793{
cc739bb0 794 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
6e0d8677 795 gen_extu(size + 1, cpu_tmp0);
cb63669a 796 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
6e0d8677 797}
2c0262af 798
a7812ae4
PB
799static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
800{
801 switch (ot) {
93ab25d7
PB
802 case OT_BYTE:
803 gen_helper_inb(v, n);
804 break;
805 case OT_WORD:
806 gen_helper_inw(v, n);
807 break;
808 case OT_LONG:
809 gen_helper_inl(v, n);
810 break;
a7812ae4 811 }
a7812ae4 812}
2c0262af 813
a7812ae4
PB
814static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
815{
816 switch (ot) {
93ab25d7
PB
817 case OT_BYTE:
818 gen_helper_outb(v, n);
819 break;
820 case OT_WORD:
821 gen_helper_outw(v, n);
822 break;
823 case OT_LONG:
824 gen_helper_outl(v, n);
825 break;
a7812ae4 826 }
a7812ae4 827}
f115e911 828
b8b6a50b
FB
829static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
830 uint32_t svm_flags)
f115e911 831{
b8b6a50b
FB
832 int state_saved;
833 target_ulong next_eip;
834
835 state_saved = 0;
f115e911 836 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
773cdfcc 837 gen_update_cc_op(s);
14ce26e7 838 gen_jmp_im(cur_eip);
b8b6a50b 839 state_saved = 1;
b6abf97d 840 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 841 switch (ot) {
93ab25d7 842 case OT_BYTE:
4a7443be
BS
843 gen_helper_check_iob(cpu_env, cpu_tmp2_i32);
844 break;
93ab25d7 845 case OT_WORD:
4a7443be
BS
846 gen_helper_check_iow(cpu_env, cpu_tmp2_i32);
847 break;
93ab25d7 848 case OT_LONG:
4a7443be
BS
849 gen_helper_check_iol(cpu_env, cpu_tmp2_i32);
850 break;
a7812ae4 851 }
b8b6a50b 852 }
872929aa 853 if(s->flags & HF_SVMI_MASK) {
b8b6a50b 854 if (!state_saved) {
773cdfcc 855 gen_update_cc_op(s);
b8b6a50b 856 gen_jmp_im(cur_eip);
b8b6a50b
FB
857 }
858 svm_flags |= (1 << (4 + ot));
859 next_eip = s->pc - s->cs_base;
b6abf97d 860 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
052e80d5
BS
861 gen_helper_svm_check_io(cpu_env, cpu_tmp2_i32,
862 tcg_const_i32(svm_flags),
a7812ae4 863 tcg_const_i32(next_eip - cur_eip));
f115e911
FB
864 }
865}
866
2c0262af
FB
867static inline void gen_movs(DisasContext *s, int ot)
868{
869 gen_string_movl_A0_ESI(s);
57fec1fe 870 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 871 gen_string_movl_A0_EDI(s);
57fec1fe 872 gen_op_st_T0_A0(ot + s->mem_index);
6e0d8677
FB
873 gen_op_movl_T0_Dshift(ot);
874 gen_op_add_reg_T0(s->aflag, R_ESI);
875 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
876}
877
b6abf97d
FB
878static void gen_op_update1_cc(void)
879{
b6abf97d
FB
880 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
881}
882
883static void gen_op_update2_cc(void)
884{
885 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
886 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
887}
888
988c3eb0
RH
889static void gen_op_update3_cc(TCGv reg)
890{
891 tcg_gen_mov_tl(cpu_cc_src2, reg);
892 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
893 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
894}
895
b6abf97d
FB
896static inline void gen_op_testl_T0_T1_cc(void)
897{
b6abf97d
FB
898 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
899}
900
901static void gen_op_update_neg_cc(void)
902{
b6abf97d 903 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
a3251186
RH
904 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
905 tcg_gen_movi_tl(cpu_cc_srcT, 0);
b6abf97d
FB
906}
907
d229edce
RH
908/* compute all eflags to cc_src */
909static void gen_compute_eflags(DisasContext *s)
8e1c85e3 910{
988c3eb0 911 TCGv zero, dst, src1, src2;
db9f2597
RH
912 int live, dead;
913
d229edce
RH
914 if (s->cc_op == CC_OP_EFLAGS) {
915 return;
916 }
436ff2d2
RH
917 if (s->cc_op == CC_OP_CLR) {
918 tcg_gen_movi_tl(cpu_cc_src, CC_Z);
919 set_cc_op(s, CC_OP_EFLAGS);
920 return;
921 }
db9f2597
RH
922
923 TCGV_UNUSED(zero);
924 dst = cpu_cc_dst;
925 src1 = cpu_cc_src;
988c3eb0 926 src2 = cpu_cc_src2;
db9f2597
RH
927
928 /* Take care to not read values that are not live. */
929 live = cc_op_live[s->cc_op] & ~USES_CC_SRCT;
988c3eb0 930 dead = live ^ (USES_CC_DST | USES_CC_SRC | USES_CC_SRC2);
db9f2597
RH
931 if (dead) {
932 zero = tcg_const_tl(0);
933 if (dead & USES_CC_DST) {
934 dst = zero;
935 }
936 if (dead & USES_CC_SRC) {
937 src1 = zero;
938 }
988c3eb0
RH
939 if (dead & USES_CC_SRC2) {
940 src2 = zero;
941 }
db9f2597
RH
942 }
943
773cdfcc 944 gen_update_cc_op(s);
988c3eb0 945 gen_helper_cc_compute_all(cpu_cc_src, dst, src1, src2, cpu_cc_op);
d229edce 946 set_cc_op(s, CC_OP_EFLAGS);
db9f2597
RH
947
948 if (dead) {
949 tcg_temp_free(zero);
950 }
8e1c85e3
FB
951}
952
bec93d72
RH
953typedef struct CCPrepare {
954 TCGCond cond;
955 TCGv reg;
956 TCGv reg2;
957 target_ulong imm;
958 target_ulong mask;
959 bool use_reg2;
960 bool no_setcond;
961} CCPrepare;
962
06847f1f 963/* compute eflags.C to reg */
bec93d72 964static CCPrepare gen_prepare_eflags_c(DisasContext *s, TCGv reg)
06847f1f
RH
965{
966 TCGv t0, t1;
bec93d72 967 int size, shift;
06847f1f
RH
968
969 switch (s->cc_op) {
970 case CC_OP_SUBB ... CC_OP_SUBQ:
a3251186 971 /* (DATA_TYPE)CC_SRCT < (DATA_TYPE)CC_SRC */
06847f1f
RH
972 size = s->cc_op - CC_OP_SUBB;
973 t1 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
974 /* If no temporary was used, be careful not to alias t1 and t0. */
975 t0 = TCGV_EQUAL(t1, cpu_cc_src) ? cpu_tmp0 : reg;
a3251186 976 tcg_gen_mov_tl(t0, cpu_cc_srcT);
06847f1f
RH
977 gen_extu(size, t0);
978 goto add_sub;
979
980 case CC_OP_ADDB ... CC_OP_ADDQ:
981 /* (DATA_TYPE)CC_DST < (DATA_TYPE)CC_SRC */
982 size = s->cc_op - CC_OP_ADDB;
983 t1 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
984 t0 = gen_ext_tl(reg, cpu_cc_dst, size, false);
985 add_sub:
bec93d72
RH
986 return (CCPrepare) { .cond = TCG_COND_LTU, .reg = t0,
987 .reg2 = t1, .mask = -1, .use_reg2 = true };
06847f1f 988
06847f1f 989 case CC_OP_LOGICB ... CC_OP_LOGICQ:
436ff2d2 990 case CC_OP_CLR:
bec93d72 991 return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
06847f1f
RH
992
993 case CC_OP_INCB ... CC_OP_INCQ:
994 case CC_OP_DECB ... CC_OP_DECQ:
bec93d72
RH
995 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
996 .mask = -1, .no_setcond = true };
06847f1f
RH
997
998 case CC_OP_SHLB ... CC_OP_SHLQ:
999 /* (CC_SRC >> (DATA_BITS - 1)) & 1 */
1000 size = s->cc_op - CC_OP_SHLB;
bec93d72
RH
1001 shift = (8 << size) - 1;
1002 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1003 .mask = (target_ulong)1 << shift };
06847f1f
RH
1004
1005 case CC_OP_MULB ... CC_OP_MULQ:
bec93d72
RH
1006 return (CCPrepare) { .cond = TCG_COND_NE,
1007 .reg = cpu_cc_src, .mask = -1 };
06847f1f 1008
bc4b43dc
RH
1009 case CC_OP_BMILGB ... CC_OP_BMILGQ:
1010 size = s->cc_op - CC_OP_BMILGB;
1011 t0 = gen_ext_tl(reg, cpu_cc_src, size, false);
1012 return (CCPrepare) { .cond = TCG_COND_EQ, .reg = t0, .mask = -1 };
1013
cd7f97ca
RH
1014 case CC_OP_ADCX:
1015 case CC_OP_ADCOX:
1016 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_dst,
1017 .mask = -1, .no_setcond = true };
1018
06847f1f
RH
1019 case CC_OP_EFLAGS:
1020 case CC_OP_SARB ... CC_OP_SARQ:
1021 /* CC_SRC & 1 */
bec93d72
RH
1022 return (CCPrepare) { .cond = TCG_COND_NE,
1023 .reg = cpu_cc_src, .mask = CC_C };
06847f1f
RH
1024
1025 default:
1026 /* The need to compute only C from CC_OP_DYNAMIC is important
1027 in efficiently implementing e.g. INC at the start of a TB. */
1028 gen_update_cc_op(s);
988c3eb0
RH
1029 gen_helper_cc_compute_c(reg, cpu_cc_dst, cpu_cc_src,
1030 cpu_cc_src2, cpu_cc_op);
bec93d72
RH
1031 return (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
1032 .mask = -1, .no_setcond = true };
06847f1f
RH
1033 }
1034}
1035
1608ecca 1036/* compute eflags.P to reg */
bec93d72 1037static CCPrepare gen_prepare_eflags_p(DisasContext *s, TCGv reg)
1608ecca 1038{
d229edce 1039 gen_compute_eflags(s);
bec93d72
RH
1040 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1041 .mask = CC_P };
1608ecca
PB
1042}
1043
1044/* compute eflags.S to reg */
bec93d72 1045static CCPrepare gen_prepare_eflags_s(DisasContext *s, TCGv reg)
1608ecca 1046{
086c4077
RH
1047 switch (s->cc_op) {
1048 case CC_OP_DYNAMIC:
1049 gen_compute_eflags(s);
1050 /* FALLTHRU */
1051 case CC_OP_EFLAGS:
cd7f97ca
RH
1052 case CC_OP_ADCX:
1053 case CC_OP_ADOX:
1054 case CC_OP_ADCOX:
bec93d72
RH
1055 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1056 .mask = CC_S };
436ff2d2
RH
1057 case CC_OP_CLR:
1058 return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
086c4077
RH
1059 default:
1060 {
1061 int size = (s->cc_op - CC_OP_ADDB) & 3;
1062 TCGv t0 = gen_ext_tl(reg, cpu_cc_dst, size, true);
bec93d72 1063 return (CCPrepare) { .cond = TCG_COND_LT, .reg = t0, .mask = -1 };
086c4077 1064 }
086c4077 1065 }
1608ecca
PB
1066}
1067
1068/* compute eflags.O to reg */
bec93d72 1069static CCPrepare gen_prepare_eflags_o(DisasContext *s, TCGv reg)
1608ecca 1070{
cd7f97ca
RH
1071 switch (s->cc_op) {
1072 case CC_OP_ADOX:
1073 case CC_OP_ADCOX:
1074 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src2,
1075 .mask = -1, .no_setcond = true };
436ff2d2
RH
1076 case CC_OP_CLR:
1077 return (CCPrepare) { .cond = TCG_COND_NEVER, .mask = -1 };
cd7f97ca
RH
1078 default:
1079 gen_compute_eflags(s);
1080 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1081 .mask = CC_O };
1082 }
1608ecca
PB
1083}
1084
1085/* compute eflags.Z to reg */
bec93d72 1086static CCPrepare gen_prepare_eflags_z(DisasContext *s, TCGv reg)
1608ecca 1087{
086c4077
RH
1088 switch (s->cc_op) {
1089 case CC_OP_DYNAMIC:
1090 gen_compute_eflags(s);
1091 /* FALLTHRU */
1092 case CC_OP_EFLAGS:
cd7f97ca
RH
1093 case CC_OP_ADCX:
1094 case CC_OP_ADOX:
1095 case CC_OP_ADCOX:
bec93d72
RH
1096 return (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1097 .mask = CC_Z };
436ff2d2
RH
1098 case CC_OP_CLR:
1099 return (CCPrepare) { .cond = TCG_COND_ALWAYS, .mask = -1 };
086c4077
RH
1100 default:
1101 {
1102 int size = (s->cc_op - CC_OP_ADDB) & 3;
1103 TCGv t0 = gen_ext_tl(reg, cpu_cc_dst, size, false);
bec93d72 1104 return (CCPrepare) { .cond = TCG_COND_EQ, .reg = t0, .mask = -1 };
086c4077 1105 }
bec93d72
RH
1106 }
1107}
1108
c365395e
PB
1109/* perform a conditional store into register 'reg' according to jump opcode
1110 value 'b'. In the fast case, T0 is guaranted not to be used. */
276e6b5f 1111static CCPrepare gen_prepare_cc(DisasContext *s, int b, TCGv reg)
8e1c85e3 1112{
c365395e 1113 int inv, jcc_op, size, cond;
276e6b5f 1114 CCPrepare cc;
c365395e
PB
1115 TCGv t0;
1116
1117 inv = b & 1;
8e1c85e3 1118 jcc_op = (b >> 1) & 7;
c365395e
PB
1119
1120 switch (s->cc_op) {
69d1aa31
RH
1121 case CC_OP_SUBB ... CC_OP_SUBQ:
1122 /* We optimize relational operators for the cmp/jcc case. */
c365395e
PB
1123 size = s->cc_op - CC_OP_SUBB;
1124 switch (jcc_op) {
1125 case JCC_BE:
a3251186 1126 tcg_gen_mov_tl(cpu_tmp4, cpu_cc_srcT);
c365395e
PB
1127 gen_extu(size, cpu_tmp4);
1128 t0 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, false);
276e6b5f
RH
1129 cc = (CCPrepare) { .cond = TCG_COND_LEU, .reg = cpu_tmp4,
1130 .reg2 = t0, .mask = -1, .use_reg2 = true };
c365395e 1131 break;
8e1c85e3 1132
c365395e 1133 case JCC_L:
276e6b5f 1134 cond = TCG_COND_LT;
c365395e
PB
1135 goto fast_jcc_l;
1136 case JCC_LE:
276e6b5f 1137 cond = TCG_COND_LE;
c365395e 1138 fast_jcc_l:
a3251186 1139 tcg_gen_mov_tl(cpu_tmp4, cpu_cc_srcT);
c365395e
PB
1140 gen_exts(size, cpu_tmp4);
1141 t0 = gen_ext_tl(cpu_tmp0, cpu_cc_src, size, true);
276e6b5f
RH
1142 cc = (CCPrepare) { .cond = cond, .reg = cpu_tmp4,
1143 .reg2 = t0, .mask = -1, .use_reg2 = true };
c365395e 1144 break;
8e1c85e3 1145
c365395e 1146 default:
8e1c85e3 1147 goto slow_jcc;
c365395e 1148 }
8e1c85e3 1149 break;
c365395e 1150
8e1c85e3
FB
1151 default:
1152 slow_jcc:
69d1aa31
RH
1153 /* This actually generates good code for JC, JZ and JS. */
1154 switch (jcc_op) {
1155 case JCC_O:
1156 cc = gen_prepare_eflags_o(s, reg);
1157 break;
1158 case JCC_B:
1159 cc = gen_prepare_eflags_c(s, reg);
1160 break;
1161 case JCC_Z:
1162 cc = gen_prepare_eflags_z(s, reg);
1163 break;
1164 case JCC_BE:
1165 gen_compute_eflags(s);
1166 cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = cpu_cc_src,
1167 .mask = CC_Z | CC_C };
1168 break;
1169 case JCC_S:
1170 cc = gen_prepare_eflags_s(s, reg);
1171 break;
1172 case JCC_P:
1173 cc = gen_prepare_eflags_p(s, reg);
1174 break;
1175 case JCC_L:
1176 gen_compute_eflags(s);
1177 if (TCGV_EQUAL(reg, cpu_cc_src)) {
1178 reg = cpu_tmp0;
1179 }
1180 tcg_gen_shri_tl(reg, cpu_cc_src, 4); /* CC_O -> CC_S */
1181 tcg_gen_xor_tl(reg, reg, cpu_cc_src);
1182 cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
1183 .mask = CC_S };
1184 break;
1185 default:
1186 case JCC_LE:
1187 gen_compute_eflags(s);
1188 if (TCGV_EQUAL(reg, cpu_cc_src)) {
1189 reg = cpu_tmp0;
1190 }
1191 tcg_gen_shri_tl(reg, cpu_cc_src, 4); /* CC_O -> CC_S */
1192 tcg_gen_xor_tl(reg, reg, cpu_cc_src);
1193 cc = (CCPrepare) { .cond = TCG_COND_NE, .reg = reg,
1194 .mask = CC_S | CC_Z };
1195 break;
1196 }
c365395e 1197 break;
8e1c85e3 1198 }
276e6b5f
RH
1199
1200 if (inv) {
1201 cc.cond = tcg_invert_cond(cc.cond);
1202 }
1203 return cc;
8e1c85e3
FB
1204}
1205
cc8b6f5b
PB
1206static void gen_setcc1(DisasContext *s, int b, TCGv reg)
1207{
1208 CCPrepare cc = gen_prepare_cc(s, b, reg);
1209
1210 if (cc.no_setcond) {
1211 if (cc.cond == TCG_COND_EQ) {
1212 tcg_gen_xori_tl(reg, cc.reg, 1);
1213 } else {
1214 tcg_gen_mov_tl(reg, cc.reg);
1215 }
1216 return;
1217 }
1218
1219 if (cc.cond == TCG_COND_NE && !cc.use_reg2 && cc.imm == 0 &&
1220 cc.mask != 0 && (cc.mask & (cc.mask - 1)) == 0) {
1221 tcg_gen_shri_tl(reg, cc.reg, ctztl(cc.mask));
1222 tcg_gen_andi_tl(reg, reg, 1);
1223 return;
1224 }
1225 if (cc.mask != -1) {
1226 tcg_gen_andi_tl(reg, cc.reg, cc.mask);
1227 cc.reg = reg;
1228 }
1229 if (cc.use_reg2) {
1230 tcg_gen_setcond_tl(cc.cond, reg, cc.reg, cc.reg2);
1231 } else {
1232 tcg_gen_setcondi_tl(cc.cond, reg, cc.reg, cc.imm);
1233 }
1234}
1235
1236static inline void gen_compute_eflags_c(DisasContext *s, TCGv reg)
1237{
1238 gen_setcc1(s, JCC_B << 1, reg);
1239}
276e6b5f 1240
8e1c85e3
FB
1241/* generate a conditional jump to label 'l1' according to jump opcode
1242 value 'b'. In the fast case, T0 is guaranted not to be used. */
dc259201
RH
1243static inline void gen_jcc1_noeob(DisasContext *s, int b, int l1)
1244{
1245 CCPrepare cc = gen_prepare_cc(s, b, cpu_T[0]);
1246
1247 if (cc.mask != -1) {
1248 tcg_gen_andi_tl(cpu_T[0], cc.reg, cc.mask);
1249 cc.reg = cpu_T[0];
1250 }
1251 if (cc.use_reg2) {
1252 tcg_gen_brcond_tl(cc.cond, cc.reg, cc.reg2, l1);
1253 } else {
1254 tcg_gen_brcondi_tl(cc.cond, cc.reg, cc.imm, l1);
1255 }
1256}
1257
1258/* Generate a conditional jump to label 'l1' according to jump opcode
1259 value 'b'. In the fast case, T0 is guaranted not to be used.
1260 A translation block must end soon. */
b27fc131 1261static inline void gen_jcc1(DisasContext *s, int b, int l1)
8e1c85e3 1262{
943131ca 1263 CCPrepare cc = gen_prepare_cc(s, b, cpu_T[0]);
8e1c85e3 1264
dc259201 1265 gen_update_cc_op(s);
943131ca
PB
1266 if (cc.mask != -1) {
1267 tcg_gen_andi_tl(cpu_T[0], cc.reg, cc.mask);
1268 cc.reg = cpu_T[0];
1269 }
dc259201 1270 set_cc_op(s, CC_OP_DYNAMIC);
943131ca
PB
1271 if (cc.use_reg2) {
1272 tcg_gen_brcond_tl(cc.cond, cc.reg, cc.reg2, l1);
1273 } else {
1274 tcg_gen_brcondi_tl(cc.cond, cc.reg, cc.imm, l1);
8e1c85e3
FB
1275 }
1276}
1277
14ce26e7
FB
1278/* XXX: does not work with gdbstub "ice" single step - not a
1279 serious problem */
1280static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
2c0262af 1281{
14ce26e7
FB
1282 int l1, l2;
1283
1284 l1 = gen_new_label();
1285 l2 = gen_new_label();
6e0d8677 1286 gen_op_jnz_ecx(s->aflag, l1);
14ce26e7
FB
1287 gen_set_label(l2);
1288 gen_jmp_tb(s, next_eip, 1);
1289 gen_set_label(l1);
1290 return l2;
2c0262af
FB
1291}
1292
1293static inline void gen_stos(DisasContext *s, int ot)
1294{
57fec1fe 1295 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
2c0262af 1296 gen_string_movl_A0_EDI(s);
57fec1fe 1297 gen_op_st_T0_A0(ot + s->mem_index);
6e0d8677
FB
1298 gen_op_movl_T0_Dshift(ot);
1299 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
1300}
1301
1302static inline void gen_lods(DisasContext *s, int ot)
1303{
1304 gen_string_movl_A0_ESI(s);
57fec1fe
FB
1305 gen_op_ld_T0_A0(ot + s->mem_index);
1306 gen_op_mov_reg_T0(ot, R_EAX);
6e0d8677
FB
1307 gen_op_movl_T0_Dshift(ot);
1308 gen_op_add_reg_T0(s->aflag, R_ESI);
2c0262af
FB
1309}
1310
1311static inline void gen_scas(DisasContext *s, int ot)
1312{
2c0262af 1313 gen_string_movl_A0_EDI(s);
57fec1fe 1314 gen_op_ld_T1_A0(ot + s->mem_index);
63633fe6 1315 gen_op(s, OP_CMPL, ot, R_EAX);
6e0d8677
FB
1316 gen_op_movl_T0_Dshift(ot);
1317 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
1318}
1319
1320static inline void gen_cmps(DisasContext *s, int ot)
1321{
2c0262af 1322 gen_string_movl_A0_EDI(s);
57fec1fe 1323 gen_op_ld_T1_A0(ot + s->mem_index);
63633fe6
RH
1324 gen_string_movl_A0_ESI(s);
1325 gen_op(s, OP_CMPL, ot, OR_TMP0);
6e0d8677
FB
1326 gen_op_movl_T0_Dshift(ot);
1327 gen_op_add_reg_T0(s->aflag, R_ESI);
1328 gen_op_add_reg_T0(s->aflag, R_EDI);
2c0262af
FB
1329}
1330
1331static inline void gen_ins(DisasContext *s, int ot)
1332{
2e70f6ef
PB
1333 if (use_icount)
1334 gen_io_start();
2c0262af 1335 gen_string_movl_A0_EDI(s);
6e0d8677
FB
1336 /* Note: we must do this dummy write first to be restartable in
1337 case of page fault. */
9772c73b 1338 gen_op_movl_T0_0();
57fec1fe 1339 gen_op_st_T0_A0(ot + s->mem_index);
b8b6a50b 1340 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
b6abf97d
FB
1341 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1342 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
a7812ae4 1343 gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
57fec1fe 1344 gen_op_st_T0_A0(ot + s->mem_index);
6e0d8677
FB
1345 gen_op_movl_T0_Dshift(ot);
1346 gen_op_add_reg_T0(s->aflag, R_EDI);
2e70f6ef
PB
1347 if (use_icount)
1348 gen_io_end();
2c0262af
FB
1349}
1350
1351static inline void gen_outs(DisasContext *s, int ot)
1352{
2e70f6ef
PB
1353 if (use_icount)
1354 gen_io_start();
2c0262af 1355 gen_string_movl_A0_ESI(s);
57fec1fe 1356 gen_op_ld_T0_A0(ot + s->mem_index);
b8b6a50b
FB
1357
1358 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
b6abf97d
FB
1359 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1360 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1361 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
a7812ae4 1362 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
b8b6a50b 1363
6e0d8677
FB
1364 gen_op_movl_T0_Dshift(ot);
1365 gen_op_add_reg_T0(s->aflag, R_ESI);
2e70f6ef
PB
1366 if (use_icount)
1367 gen_io_end();
2c0262af
FB
1368}
1369
1370/* same method as Valgrind : we generate jumps to current or next
1371 instruction */
1372#define GEN_REPZ(op) \
1373static inline void gen_repz_ ## op(DisasContext *s, int ot, \
14ce26e7 1374 target_ulong cur_eip, target_ulong next_eip) \
2c0262af 1375{ \
14ce26e7 1376 int l2;\
2c0262af 1377 gen_update_cc_op(s); \
14ce26e7 1378 l2 = gen_jz_ecx_string(s, next_eip); \
2c0262af 1379 gen_ ## op(s, ot); \
6e0d8677 1380 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
2c0262af
FB
1381 /* a loop would cause two single step exceptions if ECX = 1 \
1382 before rep string_insn */ \
1383 if (!s->jmp_opt) \
6e0d8677 1384 gen_op_jz_ecx(s->aflag, l2); \
2c0262af
FB
1385 gen_jmp(s, cur_eip); \
1386}
1387
1388#define GEN_REPZ2(op) \
1389static inline void gen_repz_ ## op(DisasContext *s, int ot, \
14ce26e7
FB
1390 target_ulong cur_eip, \
1391 target_ulong next_eip, \
2c0262af
FB
1392 int nz) \
1393{ \
14ce26e7 1394 int l2;\
2c0262af 1395 gen_update_cc_op(s); \
14ce26e7 1396 l2 = gen_jz_ecx_string(s, next_eip); \
2c0262af 1397 gen_ ## op(s, ot); \
6e0d8677 1398 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
773cdfcc 1399 gen_update_cc_op(s); \
b27fc131 1400 gen_jcc1(s, (JCC_Z << 1) | (nz ^ 1), l2); \
2c0262af 1401 if (!s->jmp_opt) \
6e0d8677 1402 gen_op_jz_ecx(s->aflag, l2); \
2c0262af
FB
1403 gen_jmp(s, cur_eip); \
1404}
1405
1406GEN_REPZ(movs)
1407GEN_REPZ(stos)
1408GEN_REPZ(lods)
1409GEN_REPZ(ins)
1410GEN_REPZ(outs)
1411GEN_REPZ2(scas)
1412GEN_REPZ2(cmps)
1413
a7812ae4
PB
1414static void gen_helper_fp_arith_ST0_FT0(int op)
1415{
1416 switch (op) {
d3eb5eae
BS
1417 case 0:
1418 gen_helper_fadd_ST0_FT0(cpu_env);
1419 break;
1420 case 1:
1421 gen_helper_fmul_ST0_FT0(cpu_env);
1422 break;
1423 case 2:
1424 gen_helper_fcom_ST0_FT0(cpu_env);
1425 break;
1426 case 3:
1427 gen_helper_fcom_ST0_FT0(cpu_env);
1428 break;
1429 case 4:
1430 gen_helper_fsub_ST0_FT0(cpu_env);
1431 break;
1432 case 5:
1433 gen_helper_fsubr_ST0_FT0(cpu_env);
1434 break;
1435 case 6:
1436 gen_helper_fdiv_ST0_FT0(cpu_env);
1437 break;
1438 case 7:
1439 gen_helper_fdivr_ST0_FT0(cpu_env);
1440 break;
a7812ae4
PB
1441 }
1442}
2c0262af
FB
1443
1444/* NOTE the exception in "r" op ordering */
a7812ae4
PB
1445static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
1446{
1447 TCGv_i32 tmp = tcg_const_i32(opreg);
1448 switch (op) {
d3eb5eae
BS
1449 case 0:
1450 gen_helper_fadd_STN_ST0(cpu_env, tmp);
1451 break;
1452 case 1:
1453 gen_helper_fmul_STN_ST0(cpu_env, tmp);
1454 break;
1455 case 4:
1456 gen_helper_fsubr_STN_ST0(cpu_env, tmp);
1457 break;
1458 case 5:
1459 gen_helper_fsub_STN_ST0(cpu_env, tmp);
1460 break;
1461 case 6:
1462 gen_helper_fdivr_STN_ST0(cpu_env, tmp);
1463 break;
1464 case 7:
1465 gen_helper_fdiv_STN_ST0(cpu_env, tmp);
1466 break;
a7812ae4
PB
1467 }
1468}
2c0262af
FB
1469
1470/* if d == OR_TMP0, it means memory operand (address in A0) */
1471static void gen_op(DisasContext *s1, int op, int ot, int d)
1472{
2c0262af 1473 if (d != OR_TMP0) {
57fec1fe 1474 gen_op_mov_TN_reg(ot, 0, d);
2c0262af 1475 } else {
57fec1fe 1476 gen_op_ld_T0_A0(ot + s1->mem_index);
2c0262af
FB
1477 }
1478 switch(op) {
1479 case OP_ADCL:
cc8b6f5b 1480 gen_compute_eflags_c(s1, cpu_tmp4);
cad3a37d
FB
1481 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1482 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1483 if (d != OR_TMP0)
1484 gen_op_mov_reg_T0(ot, d);
1485 else
1486 gen_op_st_T0_A0(ot + s1->mem_index);
988c3eb0
RH
1487 gen_op_update3_cc(cpu_tmp4);
1488 set_cc_op(s1, CC_OP_ADCB + ot);
cad3a37d 1489 break;
2c0262af 1490 case OP_SBBL:
cc8b6f5b 1491 gen_compute_eflags_c(s1, cpu_tmp4);
cad3a37d
FB
1492 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1493 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1494 if (d != OR_TMP0)
57fec1fe 1495 gen_op_mov_reg_T0(ot, d);
cad3a37d
FB
1496 else
1497 gen_op_st_T0_A0(ot + s1->mem_index);
988c3eb0
RH
1498 gen_op_update3_cc(cpu_tmp4);
1499 set_cc_op(s1, CC_OP_SBBB + ot);
cad3a37d 1500 break;
2c0262af
FB
1501 case OP_ADDL:
1502 gen_op_addl_T0_T1();
cad3a37d
FB
1503 if (d != OR_TMP0)
1504 gen_op_mov_reg_T0(ot, d);
1505 else
1506 gen_op_st_T0_A0(ot + s1->mem_index);
1507 gen_op_update2_cc();
3ca51d07 1508 set_cc_op(s1, CC_OP_ADDB + ot);
2c0262af
FB
1509 break;
1510 case OP_SUBL:
a3251186 1511 tcg_gen_mov_tl(cpu_cc_srcT, cpu_T[0]);
57fec1fe 1512 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1513 if (d != OR_TMP0)
1514 gen_op_mov_reg_T0(ot, d);
1515 else
1516 gen_op_st_T0_A0(ot + s1->mem_index);
1517 gen_op_update2_cc();
3ca51d07 1518 set_cc_op(s1, CC_OP_SUBB + ot);
2c0262af
FB
1519 break;
1520 default:
1521 case OP_ANDL:
57fec1fe 1522 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1523 if (d != OR_TMP0)
1524 gen_op_mov_reg_T0(ot, d);
1525 else
1526 gen_op_st_T0_A0(ot + s1->mem_index);
1527 gen_op_update1_cc();
3ca51d07 1528 set_cc_op(s1, CC_OP_LOGICB + ot);
57fec1fe 1529 break;
2c0262af 1530 case OP_ORL:
57fec1fe 1531 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1532 if (d != OR_TMP0)
1533 gen_op_mov_reg_T0(ot, d);
1534 else
1535 gen_op_st_T0_A0(ot + s1->mem_index);
1536 gen_op_update1_cc();
3ca51d07 1537 set_cc_op(s1, CC_OP_LOGICB + ot);
57fec1fe 1538 break;
2c0262af 1539 case OP_XORL:
57fec1fe 1540 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
cad3a37d
FB
1541 if (d != OR_TMP0)
1542 gen_op_mov_reg_T0(ot, d);
1543 else
1544 gen_op_st_T0_A0(ot + s1->mem_index);
1545 gen_op_update1_cc();
3ca51d07 1546 set_cc_op(s1, CC_OP_LOGICB + ot);
2c0262af
FB
1547 break;
1548 case OP_CMPL:
63633fe6 1549 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
a3251186 1550 tcg_gen_mov_tl(cpu_cc_srcT, cpu_T[0]);
63633fe6 1551 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
3ca51d07 1552 set_cc_op(s1, CC_OP_SUBB + ot);
2c0262af
FB
1553 break;
1554 }
b6abf97d
FB
1555}
1556
2c0262af
FB
1557/* if d == OR_TMP0, it means memory operand (address in A0) */
1558static void gen_inc(DisasContext *s1, int ot, int d, int c)
1559{
1560 if (d != OR_TMP0)
57fec1fe 1561 gen_op_mov_TN_reg(ot, 0, d);
2c0262af 1562 else
57fec1fe 1563 gen_op_ld_T0_A0(ot + s1->mem_index);
cc8b6f5b 1564 gen_compute_eflags_c(s1, cpu_cc_src);
2c0262af 1565 if (c > 0) {
b6abf97d 1566 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
3ca51d07 1567 set_cc_op(s1, CC_OP_INCB + ot);
2c0262af 1568 } else {
b6abf97d 1569 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
3ca51d07 1570 set_cc_op(s1, CC_OP_DECB + ot);
2c0262af
FB
1571 }
1572 if (d != OR_TMP0)
57fec1fe 1573 gen_op_mov_reg_T0(ot, d);
2c0262af 1574 else
57fec1fe 1575 gen_op_st_T0_A0(ot + s1->mem_index);
cd31fefa 1576 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2c0262af
FB
1577}
1578
f437d0a3
RH
1579static void gen_shift_flags(DisasContext *s, int ot, TCGv result, TCGv shm1,
1580 TCGv count, bool is_right)
1581{
1582 TCGv_i32 z32, s32, oldop;
1583 TCGv z_tl;
1584
1585 /* Store the results into the CC variables. If we know that the
1586 variable must be dead, store unconditionally. Otherwise we'll
1587 need to not disrupt the current contents. */
1588 z_tl = tcg_const_tl(0);
1589 if (cc_op_live[s->cc_op] & USES_CC_DST) {
1590 tcg_gen_movcond_tl(TCG_COND_NE, cpu_cc_dst, count, z_tl,
1591 result, cpu_cc_dst);
1592 } else {
1593 tcg_gen_mov_tl(cpu_cc_dst, result);
1594 }
1595 if (cc_op_live[s->cc_op] & USES_CC_SRC) {
1596 tcg_gen_movcond_tl(TCG_COND_NE, cpu_cc_src, count, z_tl,
1597 shm1, cpu_cc_src);
1598 } else {
1599 tcg_gen_mov_tl(cpu_cc_src, shm1);
1600 }
1601 tcg_temp_free(z_tl);
1602
1603 /* Get the two potential CC_OP values into temporaries. */
1604 tcg_gen_movi_i32(cpu_tmp2_i32, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
1605 if (s->cc_op == CC_OP_DYNAMIC) {
1606 oldop = cpu_cc_op;
1607 } else {
1608 tcg_gen_movi_i32(cpu_tmp3_i32, s->cc_op);
1609 oldop = cpu_tmp3_i32;
1610 }
1611
1612 /* Conditionally store the CC_OP value. */
1613 z32 = tcg_const_i32(0);
1614 s32 = tcg_temp_new_i32();
1615 tcg_gen_trunc_tl_i32(s32, count);
1616 tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, s32, z32, cpu_tmp2_i32, oldop);
1617 tcg_temp_free_i32(z32);
1618 tcg_temp_free_i32(s32);
1619
1620 /* The CC_OP value is no longer predictable. */
1621 set_cc_op(s, CC_OP_DYNAMIC);
1622}
1623
b6abf97d
FB
1624static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1625 int is_right, int is_arith)
2c0262af 1626{
a41f62f5 1627 target_ulong mask = (ot == OT_QUAD ? 0x3f : 0x1f);
3b46e624 1628
b6abf97d 1629 /* load */
82786041 1630 if (op1 == OR_TMP0) {
b6abf97d 1631 gen_op_ld_T0_A0(ot + s->mem_index);
82786041 1632 } else {
b6abf97d 1633 gen_op_mov_TN_reg(ot, 0, op1);
82786041 1634 }
b6abf97d 1635
a41f62f5
RH
1636 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1637 tcg_gen_subi_tl(cpu_tmp0, cpu_T[1], 1);
b6abf97d
FB
1638
1639 if (is_right) {
1640 if (is_arith) {
f484d386 1641 gen_exts(ot, cpu_T[0]);
a41f62f5
RH
1642 tcg_gen_sar_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1643 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
b6abf97d 1644 } else {
cad3a37d 1645 gen_extu(ot, cpu_T[0]);
a41f62f5
RH
1646 tcg_gen_shr_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1647 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
b6abf97d
FB
1648 }
1649 } else {
a41f62f5
RH
1650 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1651 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
b6abf97d
FB
1652 }
1653
1654 /* store */
82786041 1655 if (op1 == OR_TMP0) {
b6abf97d 1656 gen_op_st_T0_A0(ot + s->mem_index);
82786041 1657 } else {
b6abf97d 1658 gen_op_mov_reg_T0(ot, op1);
82786041
RH
1659 }
1660
f437d0a3 1661 gen_shift_flags(s, ot, cpu_T[0], cpu_tmp0, cpu_T[1], is_right);
b6abf97d
FB
1662}
1663
c1c37968
FB
1664static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1665 int is_right, int is_arith)
1666{
a41f62f5 1667 int mask = (ot == OT_QUAD ? 0x3f : 0x1f);
c1c37968
FB
1668
1669 /* load */
1670 if (op1 == OR_TMP0)
1671 gen_op_ld_T0_A0(ot + s->mem_index);
1672 else
1673 gen_op_mov_TN_reg(ot, 0, op1);
1674
1675 op2 &= mask;
1676 if (op2 != 0) {
1677 if (is_right) {
1678 if (is_arith) {
1679 gen_exts(ot, cpu_T[0]);
2a449d14 1680 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
c1c37968
FB
1681 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1682 } else {
1683 gen_extu(ot, cpu_T[0]);
2a449d14 1684 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
c1c37968
FB
1685 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1686 }
1687 } else {
2a449d14 1688 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
c1c37968
FB
1689 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1690 }
1691 }
1692
1693 /* store */
1694 if (op1 == OR_TMP0)
1695 gen_op_st_T0_A0(ot + s->mem_index);
1696 else
1697 gen_op_mov_reg_T0(ot, op1);
1698
1699 /* update eflags if non zero shift */
1700 if (op2 != 0) {
2a449d14 1701 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
c1c37968 1702 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3ca51d07 1703 set_cc_op(s, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
c1c37968
FB
1704 }
1705}
1706
b6abf97d
FB
1707static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1708{
1709 if (arg2 >= 0)
1710 tcg_gen_shli_tl(ret, arg1, arg2);
1711 else
1712 tcg_gen_shri_tl(ret, arg1, -arg2);
1713}
1714
34d80a55 1715static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, int is_right)
b6abf97d 1716{
34d80a55
RH
1717 target_ulong mask = (ot == OT_QUAD ? 0x3f : 0x1f);
1718 TCGv_i32 t0, t1;
b6abf97d
FB
1719
1720 /* load */
1e4840bf 1721 if (op1 == OR_TMP0) {
34d80a55 1722 gen_op_ld_T0_A0(ot + s->mem_index);
1e4840bf 1723 } else {
34d80a55 1724 gen_op_mov_TN_reg(ot, 0, op1);
1e4840bf 1725 }
b6abf97d 1726
34d80a55 1727 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
b6abf97d 1728
34d80a55
RH
1729 switch (ot) {
1730 case OT_BYTE:
1731 /* Replicate the 8-bit input so that a 32-bit rotate works. */
1732 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
1733 tcg_gen_muli_tl(cpu_T[0], cpu_T[0], 0x01010101);
1734 goto do_long;
1735 case OT_WORD:
1736 /* Replicate the 16-bit input so that a 32-bit rotate works. */
1737 tcg_gen_deposit_tl(cpu_T[0], cpu_T[0], cpu_T[0], 16, 16);
1738 goto do_long;
1739 do_long:
1740#ifdef TARGET_X86_64
1741 case OT_LONG:
1742 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1743 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
1744 if (is_right) {
1745 tcg_gen_rotr_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
1746 } else {
1747 tcg_gen_rotl_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
1748 }
1749 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
1750 break;
1751#endif
1752 default:
1753 if (is_right) {
1754 tcg_gen_rotr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1755 } else {
1756 tcg_gen_rotl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1757 }
1758 break;
b6abf97d 1759 }
b6abf97d 1760
b6abf97d 1761 /* store */
1e4840bf 1762 if (op1 == OR_TMP0) {
34d80a55 1763 gen_op_st_T0_A0(ot + s->mem_index);
1e4840bf 1764 } else {
34d80a55 1765 gen_op_mov_reg_T0(ot, op1);
1e4840bf 1766 }
b6abf97d 1767
34d80a55
RH
1768 /* We'll need the flags computed into CC_SRC. */
1769 gen_compute_eflags(s);
b6abf97d 1770
34d80a55
RH
1771 /* The value that was "rotated out" is now present at the other end
1772 of the word. Compute C into CC_DST and O into CC_SRC2. Note that
1773 since we've computed the flags into CC_SRC, these variables are
1774 currently dead. */
b6abf97d 1775 if (is_right) {
34d80a55
RH
1776 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask - 1);
1777 tcg_gen_shri_tl(cpu_cc_dst, cpu_T[0], mask);
089305ac 1778 tcg_gen_andi_tl(cpu_cc_dst, cpu_cc_dst, 1);
34d80a55
RH
1779 } else {
1780 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask);
1781 tcg_gen_andi_tl(cpu_cc_dst, cpu_T[0], 1);
b6abf97d 1782 }
34d80a55
RH
1783 tcg_gen_andi_tl(cpu_cc_src2, cpu_cc_src2, 1);
1784 tcg_gen_xor_tl(cpu_cc_src2, cpu_cc_src2, cpu_cc_dst);
1785
1786 /* Now conditionally store the new CC_OP value. If the shift count
1787 is 0 we keep the CC_OP_EFLAGS setting so that only CC_SRC is live.
1788 Otherwise reuse CC_OP_ADCOX which have the C and O flags split out
1789 exactly as we computed above. */
1790 t0 = tcg_const_i32(0);
1791 t1 = tcg_temp_new_i32();
1792 tcg_gen_trunc_tl_i32(t1, cpu_T[1]);
1793 tcg_gen_movi_i32(cpu_tmp2_i32, CC_OP_ADCOX);
1794 tcg_gen_movi_i32(cpu_tmp3_i32, CC_OP_EFLAGS);
1795 tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, t1, t0,
1796 cpu_tmp2_i32, cpu_tmp3_i32);
1797 tcg_temp_free_i32(t0);
1798 tcg_temp_free_i32(t1);
1799
1800 /* The CC_OP value is no longer predictable. */
1801 set_cc_op(s, CC_OP_DYNAMIC);
b6abf97d
FB
1802}
1803
8cd6345d 1804static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
1805 int is_right)
1806{
34d80a55
RH
1807 int mask = (ot == OT_QUAD ? 0x3f : 0x1f);
1808 int shift;
8cd6345d 1809
1810 /* load */
1811 if (op1 == OR_TMP0) {
34d80a55 1812 gen_op_ld_T0_A0(ot + s->mem_index);
8cd6345d 1813 } else {
34d80a55 1814 gen_op_mov_TN_reg(ot, 0, op1);
8cd6345d 1815 }
1816
8cd6345d 1817 op2 &= mask;
8cd6345d 1818 if (op2 != 0) {
34d80a55
RH
1819 switch (ot) {
1820#ifdef TARGET_X86_64
1821 case OT_LONG:
1822 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1823 if (is_right) {
1824 tcg_gen_rotri_i32(cpu_tmp2_i32, cpu_tmp2_i32, op2);
1825 } else {
1826 tcg_gen_rotli_i32(cpu_tmp2_i32, cpu_tmp2_i32, op2);
1827 }
1828 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
1829 break;
1830#endif
1831 default:
1832 if (is_right) {
1833 tcg_gen_rotri_tl(cpu_T[0], cpu_T[0], op2);
1834 } else {
1835 tcg_gen_rotli_tl(cpu_T[0], cpu_T[0], op2);
1836 }
1837 break;
1838 case OT_BYTE:
1839 mask = 7;
1840 goto do_shifts;
1841 case OT_WORD:
1842 mask = 15;
1843 do_shifts:
1844 shift = op2 & mask;
1845 if (is_right) {
1846 shift = mask + 1 - shift;
1847 }
1848 gen_extu(ot, cpu_T[0]);
1849 tcg_gen_shli_tl(cpu_tmp0, cpu_T[0], shift);
1850 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], mask + 1 - shift);
1851 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1852 break;
8cd6345d 1853 }
8cd6345d 1854 }
1855
1856 /* store */
1857 if (op1 == OR_TMP0) {
34d80a55 1858 gen_op_st_T0_A0(ot + s->mem_index);
8cd6345d 1859 } else {
34d80a55 1860 gen_op_mov_reg_T0(ot, op1);
8cd6345d 1861 }
1862
1863 if (op2 != 0) {
34d80a55 1864 /* Compute the flags into CC_SRC. */
d229edce 1865 gen_compute_eflags(s);
0ff6addd 1866
34d80a55
RH
1867 /* The value that was "rotated out" is now present at the other end
1868 of the word. Compute C into CC_DST and O into CC_SRC2. Note that
1869 since we've computed the flags into CC_SRC, these variables are
1870 currently dead. */
8cd6345d 1871 if (is_right) {
34d80a55
RH
1872 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask - 1);
1873 tcg_gen_shri_tl(cpu_cc_dst, cpu_T[0], mask);
38ebb396 1874 tcg_gen_andi_tl(cpu_cc_dst, cpu_cc_dst, 1);
34d80a55
RH
1875 } else {
1876 tcg_gen_shri_tl(cpu_cc_src2, cpu_T[0], mask);
1877 tcg_gen_andi_tl(cpu_cc_dst, cpu_T[0], 1);
8cd6345d 1878 }
34d80a55
RH
1879 tcg_gen_andi_tl(cpu_cc_src2, cpu_cc_src2, 1);
1880 tcg_gen_xor_tl(cpu_cc_src2, cpu_cc_src2, cpu_cc_dst);
1881 set_cc_op(s, CC_OP_ADCOX);
8cd6345d 1882 }
8cd6345d 1883}
1884
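/* A minimal illustrative sketch (not translator code): the byte/word
 * immediate-rotate trick used above, i.e. a left rotate built from a
 * left shift, a right shift and an OR.  Shown for 8 bits; the helper
 * name is hypothetical. */
static inline uint8_t rol8_model(uint8_t x, unsigned shift)
{
    shift &= 7;
    if (shift == 0) {
        return x;
    }
    return (uint8_t)((x << shift) | (x >> (8 - shift)));
}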
b6abf97d
FB
1885/* XXX: add faster immediate = 1 case */
1886static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1887 int is_right)
1888{
d229edce 1889 gen_compute_eflags(s);
c7b3c873 1890 assert(s->cc_op == CC_OP_EFLAGS);
b6abf97d
FB
1891
1892 /* load */
1893 if (op1 == OR_TMP0)
1894 gen_op_ld_T0_A0(ot + s->mem_index);
1895 else
1896 gen_op_mov_TN_reg(ot, 0, op1);
1897
a7812ae4
PB
1898 if (is_right) {
1899 switch (ot) {
93ab25d7 1900 case OT_BYTE:
7923057b
BS
1901 gen_helper_rcrb(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1902 break;
93ab25d7 1903 case OT_WORD:
7923057b
BS
1904 gen_helper_rcrw(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1905 break;
93ab25d7 1906 case OT_LONG:
7923057b
BS
1907 gen_helper_rcrl(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1908 break;
a7812ae4 1909#ifdef TARGET_X86_64
93ab25d7 1910 case OT_QUAD:
7923057b
BS
1911 gen_helper_rcrq(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1912 break;
a7812ae4
PB
1913#endif
1914 }
1915 } else {
1916 switch (ot) {
93ab25d7 1917 case OT_BYTE:
7923057b
BS
1918 gen_helper_rclb(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1919 break;
93ab25d7 1920 case OT_WORD:
7923057b
BS
1921 gen_helper_rclw(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1922 break;
93ab25d7 1923 case OT_LONG:
7923057b
BS
1924 gen_helper_rcll(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1925 break;
a7812ae4 1926#ifdef TARGET_X86_64
93ab25d7 1927 case OT_QUAD:
7923057b
BS
1928 gen_helper_rclq(cpu_T[0], cpu_env, cpu_T[0], cpu_T[1]);
1929 break;
a7812ae4
PB
1930#endif
1931 }
1932 }
b6abf97d
FB
1933 /* store */
1934 if (op1 == OR_TMP0)
1935 gen_op_st_T0_A0(ot + s->mem_index);
1936 else
1937 gen_op_mov_reg_T0(ot, op1);
b6abf97d
FB
1938}
1939
1940/* XXX: add faster immediate case */
3b9d3cf1 1941static void gen_shiftd_rm_T1(DisasContext *s, int ot, int op1,
f437d0a3 1942 bool is_right, TCGv count_in)
b6abf97d 1943{
f437d0a3
RH
1944 target_ulong mask = (ot == OT_QUAD ? 63 : 31);
1945 TCGv count;
b6abf97d
FB
1946
1947 /* load */
1e4840bf 1948 if (op1 == OR_TMP0) {
f437d0a3 1949 gen_op_ld_T0_A0(ot + s->mem_index);
1e4840bf 1950 } else {
f437d0a3 1951 gen_op_mov_TN_reg(ot, 0, op1);
1e4840bf 1952 }
b6abf97d 1953
f437d0a3
RH
1954 count = tcg_temp_new();
1955 tcg_gen_andi_tl(count, count_in, mask);
1e4840bf 1956
f437d0a3
RH
1957 switch (ot) {
1958 case OT_WORD:
1959 /* Note: we implement the Intel behaviour for shift count > 16.
1960 This means "shrdw C, B, A" shifts A:B:A >> C. Build the B:A
1961 portion by constructing it as a 32-bit value. */
b6abf97d 1962 if (is_right) {
f437d0a3
RH
1963 tcg_gen_deposit_tl(cpu_tmp0, cpu_T[0], cpu_T[1], 16, 16);
1964 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1965 tcg_gen_mov_tl(cpu_T[0], cpu_tmp0);
b6abf97d 1966 } else {
f437d0a3 1967 tcg_gen_deposit_tl(cpu_T[1], cpu_T[0], cpu_T[1], 16, 16);
b6abf97d 1968 }
f437d0a3
RH
1969 /* FALLTHRU */
1970#ifdef TARGET_X86_64
1971 case OT_LONG:
1972 /* Concatenate the two 32-bit values and use a 64-bit shift. */
1973 tcg_gen_subi_tl(cpu_tmp0, count, 1);
b6abf97d 1974 if (is_right) {
f437d0a3
RH
1975 tcg_gen_concat_tl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1976 tcg_gen_shr_i64(cpu_tmp0, cpu_T[0], cpu_tmp0);
1977 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], count);
1978 } else {
1979 tcg_gen_concat_tl_i64(cpu_T[0], cpu_T[1], cpu_T[0]);
1980 tcg_gen_shl_i64(cpu_tmp0, cpu_T[0], cpu_tmp0);
1981 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], count);
1982 tcg_gen_shri_i64(cpu_tmp0, cpu_tmp0, 32);
1983 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], 32);
1984 }
1985 break;
1986#endif
1987 default:
1988 tcg_gen_subi_tl(cpu_tmp0, count, 1);
1989 if (is_right) {
1990 tcg_gen_shr_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
b6abf97d 1991
f437d0a3
RH
1992 tcg_gen_subfi_tl(cpu_tmp4, mask + 1, count);
1993 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], count);
1994 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp4);
b6abf97d 1995 } else {
f437d0a3
RH
1996 tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp0);
1997 if (ot == OT_WORD) {
1998 /* Only needed if count > 16, for Intel behaviour. */
1999 tcg_gen_subfi_tl(cpu_tmp4, 33, count);
2000 tcg_gen_shr_tl(cpu_tmp4, cpu_T[1], cpu_tmp4);
2001 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, cpu_tmp4);
2002 }
2003
2004 tcg_gen_subfi_tl(cpu_tmp4, mask + 1, count);
2005 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], count);
2006 tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp4);
b6abf97d 2007 }
f437d0a3
RH
2008 tcg_gen_movi_tl(cpu_tmp4, 0);
2009 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_T[1], count, cpu_tmp4,
2010 cpu_tmp4, cpu_T[1]);
2011 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2012 break;
b6abf97d 2013 }
b6abf97d 2014
b6abf97d 2015 /* store */
1e4840bf 2016 if (op1 == OR_TMP0) {
f437d0a3 2017 gen_op_st_T0_A0(ot + s->mem_index);
b6abf97d 2018 } else {
f437d0a3 2019 gen_op_mov_reg_T0(ot, op1);
b6abf97d 2020 }
1e4840bf 2021
f437d0a3
RH
2022 gen_shift_flags(s, ot, cpu_T[0], cpu_tmp0, count, is_right);
2023 tcg_temp_free(count);
b6abf97d
FB
2024}
2025
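/* A minimal illustrative sketch (not translator code) of the Intel
 * 16-bit SHRD behaviour modelled above: for counts up to 31 the result
 * is the low word of A:B:A shifted right, so counts above 16 pull bits
 * of the destination back in.  The helper name is hypothetical. */
static inline uint16_t shrd16_model(uint16_t a, uint16_t b, unsigned count)
{
    uint64_t v;

    count &= 31;
    if (count == 0) {
        return a;
    }
    v = ((uint64_t)a << 32) | ((uint64_t)b << 16) | a;  /* A:B:A */
    return (uint16_t)(v >> count);
}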
2026static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2027{
2028 if (s != OR_TMP1)
2029 gen_op_mov_TN_reg(ot, 1, s);
2030 switch(op) {
2031 case OP_ROL:
2032 gen_rot_rm_T1(s1, ot, d, 0);
2033 break;
2034 case OP_ROR:
2035 gen_rot_rm_T1(s1, ot, d, 1);
2036 break;
2037 case OP_SHL:
2038 case OP_SHL1:
2039 gen_shift_rm_T1(s1, ot, d, 0, 0);
2040 break;
2041 case OP_SHR:
2042 gen_shift_rm_T1(s1, ot, d, 1, 0);
2043 break;
2044 case OP_SAR:
2045 gen_shift_rm_T1(s1, ot, d, 1, 1);
2046 break;
2047 case OP_RCL:
2048 gen_rotc_rm_T1(s1, ot, d, 0);
2049 break;
2050 case OP_RCR:
2051 gen_rotc_rm_T1(s1, ot, d, 1);
2052 break;
2053 }
2c0262af
FB
2054}
2055
2056static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2057{
c1c37968 2058 switch(op) {
8cd6345d 2059 case OP_ROL:
2060 gen_rot_rm_im(s1, ot, d, c, 0);
2061 break;
2062 case OP_ROR:
2063 gen_rot_rm_im(s1, ot, d, c, 1);
2064 break;
c1c37968
FB
2065 case OP_SHL:
2066 case OP_SHL1:
2067 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2068 break;
2069 case OP_SHR:
2070 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2071 break;
2072 case OP_SAR:
2073 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2074 break;
2075 default:
2076 /* currently not optimized */
2077 gen_op_movl_T1_im(c);
2078 gen_shift(s1, op, ot, d, OR_TMP1);
2079 break;
2080 }
2c0262af
FB
2081}
2082
0af10c86
BS
2083static void gen_lea_modrm(CPUX86State *env, DisasContext *s, int modrm,
2084 int *reg_ptr, int *offset_ptr)
2c0262af 2085{
14ce26e7 2086 target_long disp;
2c0262af 2087 int havesib;
14ce26e7 2088 int base;
2c0262af
FB
2089 int index;
2090 int scale;
2091 int opreg;
2092 int mod, rm, code, override, must_add_seg;
7865eec4 2093 TCGv sum;
2c0262af
FB
2094
2095 override = s->override;
2096 must_add_seg = s->addseg;
2097 if (override >= 0)
2098 must_add_seg = 1;
2099 mod = (modrm >> 6) & 3;
2100 rm = modrm & 7;
2101
2102 if (s->aflag) {
2c0262af
FB
2103 havesib = 0;
2104 base = rm;
7865eec4 2105 index = -1;
2c0262af 2106 scale = 0;
3b46e624 2107
2c0262af
FB
2108 if (base == 4) {
2109 havesib = 1;
0af10c86 2110 code = cpu_ldub_code(env, s->pc++);
2c0262af 2111 scale = (code >> 6) & 3;
14ce26e7 2112 index = ((code >> 3) & 7) | REX_X(s);
7865eec4
RH
2113 if (index == 4) {
2114 index = -1; /* no index */
2115 }
14ce26e7 2116 base = (code & 7);
2c0262af 2117 }
14ce26e7 2118 base |= REX_B(s);
2c0262af
FB
2119
2120 switch (mod) {
2121 case 0:
14ce26e7 2122 if ((base & 7) == 5) {
2c0262af 2123 base = -1;
0af10c86 2124 disp = (int32_t)cpu_ldl_code(env, s->pc);
2c0262af 2125 s->pc += 4;
14ce26e7
FB
2126 if (CODE64(s) && !havesib) {
2127 disp += s->pc + s->rip_offset;
2128 }
2c0262af
FB
2129 } else {
2130 disp = 0;
2131 }
2132 break;
2133 case 1:
0af10c86 2134 disp = (int8_t)cpu_ldub_code(env, s->pc++);
2c0262af
FB
2135 break;
2136 default:
2137 case 2:
0af10c86 2138 disp = (int32_t)cpu_ldl_code(env, s->pc);
2c0262af
FB
2139 s->pc += 4;
2140 break;
2141 }
3b46e624 2142
7865eec4
RH
2143 /* For correct popl handling with esp. */
2144 if (base == R_ESP && s->popl_esp_hack) {
2145 disp += s->popl_esp_hack;
2146 }
2147
2148 /* Compute the address, with a minimum number of TCG ops. */
2149 TCGV_UNUSED(sum);
2150 if (index >= 0) {
2151 if (scale == 0) {
2152 sum = cpu_regs[index];
2153 } else {
2154 tcg_gen_shli_tl(cpu_A0, cpu_regs[index], scale);
2155 sum = cpu_A0;
14ce26e7 2156 }
7865eec4
RH
2157 if (base >= 0) {
2158 tcg_gen_add_tl(cpu_A0, sum, cpu_regs[base]);
2159 sum = cpu_A0;
14ce26e7 2160 }
7865eec4
RH
2161 } else if (base >= 0) {
2162 sum = cpu_regs[base];
2c0262af 2163 }
7865eec4
RH
2164 if (TCGV_IS_UNUSED(sum)) {
2165 tcg_gen_movi_tl(cpu_A0, disp);
2166 } else {
2167 tcg_gen_addi_tl(cpu_A0, sum, disp);
2c0262af 2168 }
7865eec4 2169
2c0262af
FB
2170 if (must_add_seg) {
2171 if (override < 0) {
7865eec4 2172 if (base == R_EBP || base == R_ESP) {
2c0262af 2173 override = R_SS;
7865eec4 2174 } else {
2c0262af 2175 override = R_DS;
7865eec4 2176 }
2c0262af 2177 }
7865eec4
RH
2178
2179 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2180 offsetof(CPUX86State, segs[override].base));
2181 if (CODE64(s)) {
2182 if (s->aflag != 2) {
2183 tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
2184 }
2185 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
2186 goto done;
14ce26e7 2187 }
7865eec4
RH
2188
2189 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
2190 }
2191
2192 if (s->aflag != 2) {
2193 tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
2c0262af
FB
2194 }
2195 } else {
2196 switch (mod) {
2197 case 0:
2198 if (rm == 6) {
0af10c86 2199 disp = cpu_lduw_code(env, s->pc);
2c0262af
FB
2200 s->pc += 2;
2201 gen_op_movl_A0_im(disp);
2202 rm = 0; /* avoid SS override */
2203 goto no_rm;
2204 } else {
2205 disp = 0;
2206 }
2207 break;
2208 case 1:
0af10c86 2209 disp = (int8_t)cpu_ldub_code(env, s->pc++);
2c0262af
FB
2210 break;
2211 default:
2212 case 2:
0af10c86 2213 disp = cpu_lduw_code(env, s->pc);
2c0262af
FB
2214 s->pc += 2;
2215 break;
2216 }
2217 switch(rm) {
2218 case 0:
57fec1fe
FB
2219 gen_op_movl_A0_reg(R_EBX);
2220 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af
FB
2221 break;
2222 case 1:
57fec1fe
FB
2223 gen_op_movl_A0_reg(R_EBX);
2224 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af
FB
2225 break;
2226 case 2:
57fec1fe
FB
2227 gen_op_movl_A0_reg(R_EBP);
2228 gen_op_addl_A0_reg_sN(0, R_ESI);
2c0262af
FB
2229 break;
2230 case 3:
57fec1fe
FB
2231 gen_op_movl_A0_reg(R_EBP);
2232 gen_op_addl_A0_reg_sN(0, R_EDI);
2c0262af
FB
2233 break;
2234 case 4:
57fec1fe 2235 gen_op_movl_A0_reg(R_ESI);
2c0262af
FB
2236 break;
2237 case 5:
57fec1fe 2238 gen_op_movl_A0_reg(R_EDI);
2c0262af
FB
2239 break;
2240 case 6:
57fec1fe 2241 gen_op_movl_A0_reg(R_EBP);
2c0262af
FB
2242 break;
2243 default:
2244 case 7:
57fec1fe 2245 gen_op_movl_A0_reg(R_EBX);
2c0262af
FB
2246 break;
2247 }
2248 if (disp != 0)
2249 gen_op_addl_A0_im(disp);
2250 gen_op_andl_A0_ffff();
2251 no_rm:
2252 if (must_add_seg) {
2253 if (override < 0) {
2254 if (rm == 2 || rm == 3 || rm == 6)
2255 override = R_SS;
2256 else
2257 override = R_DS;
2258 }
7162ab21 2259 gen_op_addl_A0_seg(s, override);
2c0262af
FB
2260 }
2261 }
2262
7865eec4 2263 done:
2c0262af
FB
2264 opreg = OR_A0;
2265 disp = 0;
2266 *reg_ptr = opreg;
2267 *offset_ptr = disp;
2268}
2269
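/* A minimal illustrative sketch (not translator code): the 32-bit
 * effective address that the generated ops compute once the ModRM/SIB
 * fields above have been decoded.  base/index of -1 mean "absent", as
 * in gen_lea_modrm; the helper name is hypothetical. */
static inline uint32_t ea32_model(int base, int index, int scale,
                                  int32_t disp, const uint32_t *regs)
{
    uint32_t ea = disp;

    if (index >= 0) {
        ea += regs[index] << scale;
    }
    if (base >= 0) {
        ea += regs[base];
    }
    return ea;  /* the segment base is added afterwards when needed */
}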
0af10c86 2270static void gen_nop_modrm(CPUX86State *env, DisasContext *s, int modrm)
e17a36ce
FB
2271{
2272 int mod, rm, base, code;
2273
2274 mod = (modrm >> 6) & 3;
2275 if (mod == 3)
2276 return;
2277 rm = modrm & 7;
2278
2279 if (s->aflag) {
2280
2281 base = rm;
3b46e624 2282
e17a36ce 2283 if (base == 4) {
0af10c86 2284 code = cpu_ldub_code(env, s->pc++);
e17a36ce
FB
2285 base = (code & 7);
2286 }
3b46e624 2287
e17a36ce
FB
2288 switch (mod) {
2289 case 0:
2290 if (base == 5) {
2291 s->pc += 4;
2292 }
2293 break;
2294 case 1:
2295 s->pc++;
2296 break;
2297 default:
2298 case 2:
2299 s->pc += 4;
2300 break;
2301 }
2302 } else {
2303 switch (mod) {
2304 case 0:
2305 if (rm == 6) {
2306 s->pc += 2;
2307 }
2308 break;
2309 case 1:
2310 s->pc++;
2311 break;
2312 default:
2313 case 2:
2314 s->pc += 2;
2315 break;
2316 }
2317 }
2318}
2319
664e0f19
FB
2320/* used for LEA and MOV AX, mem */
2321static void gen_add_A0_ds_seg(DisasContext *s)
2322{
2323 int override, must_add_seg;
2324 must_add_seg = s->addseg;
2325 override = R_DS;
2326 if (s->override >= 0) {
2327 override = s->override;
2328 must_add_seg = 1;
664e0f19
FB
2329 }
2330 if (must_add_seg) {
8f091a59
FB
2331#ifdef TARGET_X86_64
2332 if (CODE64(s)) {
57fec1fe 2333 gen_op_addq_A0_seg(override);
5fafdf24 2334 } else
8f091a59
FB
2335#endif
2336 {
7162ab21 2337 gen_op_addl_A0_seg(s, override);
8f091a59 2338 }
664e0f19
FB
2339 }
2340}
2341
222a3336 2342/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2c0262af 2343 OR_TMP0 */
0af10c86
BS
2344static void gen_ldst_modrm(CPUX86State *env, DisasContext *s, int modrm,
2345 int ot, int reg, int is_store)
2c0262af
FB
2346{
2347 int mod, rm, opreg, disp;
2348
2349 mod = (modrm >> 6) & 3;
14ce26e7 2350 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
2351 if (mod == 3) {
2352 if (is_store) {
2353 if (reg != OR_TMP0)
57fec1fe
FB
2354 gen_op_mov_TN_reg(ot, 0, reg);
2355 gen_op_mov_reg_T0(ot, rm);
2c0262af 2356 } else {
57fec1fe 2357 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af 2358 if (reg != OR_TMP0)
57fec1fe 2359 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
2360 }
2361 } else {
0af10c86 2362 gen_lea_modrm(env, s, modrm, &opreg, &disp);
2c0262af
FB
2363 if (is_store) {
2364 if (reg != OR_TMP0)
57fec1fe
FB
2365 gen_op_mov_TN_reg(ot, 0, reg);
2366 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 2367 } else {
57fec1fe 2368 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 2369 if (reg != OR_TMP0)
57fec1fe 2370 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
2371 }
2372 }
2373}
2374
0af10c86 2375static inline uint32_t insn_get(CPUX86State *env, DisasContext *s, int ot)
2c0262af
FB
2376{
2377 uint32_t ret;
2378
2379 switch(ot) {
2380 case OT_BYTE:
0af10c86 2381 ret = cpu_ldub_code(env, s->pc);
2c0262af
FB
2382 s->pc++;
2383 break;
2384 case OT_WORD:
0af10c86 2385 ret = cpu_lduw_code(env, s->pc);
2c0262af
FB
2386 s->pc += 2;
2387 break;
2388 default:
2389 case OT_LONG:
0af10c86 2390 ret = cpu_ldl_code(env, s->pc);
2c0262af
FB
2391 s->pc += 4;
2392 break;
2393 }
2394 return ret;
2395}
2396
14ce26e7
FB
2397static inline int insn_const_size(unsigned int ot)
2398{
2399 if (ot <= OT_LONG)
2400 return 1 << ot;
2401 else
2402 return 4;
2403}
2404
6e256c93
FB
2405static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2406{
2407 TranslationBlock *tb;
2408 target_ulong pc;
2409
2410 pc = s->cs_base + eip;
2411 tb = s->tb;
2412 /* NOTE: we handle the case where the TB spans two pages here */
2413 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2414 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2415 /* jump to same page: we can use a direct jump */
57fec1fe 2416 tcg_gen_goto_tb(tb_num);
6e256c93 2417 gen_jmp_im(eip);
8cfd0495 2418 tcg_gen_exit_tb((uintptr_t)tb + tb_num);
6e256c93
FB
2419 } else {
2420 /* jump to another page: currently not optimized */
2421 gen_jmp_im(eip);
2422 gen_eob(s);
2423 }
2424}
2425
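/* A minimal illustrative sketch (not translator code): the page check
 * used by gen_goto_tb above, written as a standalone predicate.  Direct
 * TB chaining is only used when the destination stays on a page already
 * covered by the current TB.  The function name is hypothetical. */
static inline bool tb_jmp_same_page_model(target_ulong dest_pc,
                                          target_ulong tb_pc,
                                          target_ulong last_pc)
{
    return (dest_pc & TARGET_PAGE_MASK) == (tb_pc & TARGET_PAGE_MASK) ||
           (dest_pc & TARGET_PAGE_MASK) == (last_pc & TARGET_PAGE_MASK);
}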
5fafdf24 2426static inline void gen_jcc(DisasContext *s, int b,
14ce26e7 2427 target_ulong val, target_ulong next_eip)
2c0262af 2428{
b27fc131 2429 int l1, l2;
3b46e624 2430
2c0262af 2431 if (s->jmp_opt) {
14ce26e7 2432 l1 = gen_new_label();
b27fc131 2433 gen_jcc1(s, b, l1);
dc259201 2434
6e256c93 2435 gen_goto_tb(s, 0, next_eip);
14ce26e7
FB
2436
2437 gen_set_label(l1);
6e256c93 2438 gen_goto_tb(s, 1, val);
5779406a 2439 s->is_jmp = DISAS_TB_JUMP;
2c0262af 2440 } else {
14ce26e7
FB
2441 l1 = gen_new_label();
2442 l2 = gen_new_label();
b27fc131 2443 gen_jcc1(s, b, l1);
8e1c85e3 2444
14ce26e7 2445 gen_jmp_im(next_eip);
8e1c85e3
FB
2446 tcg_gen_br(l2);
2447
14ce26e7
FB
2448 gen_set_label(l1);
2449 gen_jmp_im(val);
2450 gen_set_label(l2);
2c0262af
FB
2451 gen_eob(s);
2452 }
2453}
2454
f32d3781
PB
2455static void gen_cmovcc1(CPUX86State *env, DisasContext *s, int ot, int b,
2456 int modrm, int reg)
2457{
57eb0cc8 2458 CCPrepare cc;
f32d3781 2459
57eb0cc8 2460 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
f32d3781 2461
57eb0cc8
RH
2462 cc = gen_prepare_cc(s, b, cpu_T[1]);
2463 if (cc.mask != -1) {
2464 TCGv t0 = tcg_temp_new();
2465 tcg_gen_andi_tl(t0, cc.reg, cc.mask);
2466 cc.reg = t0;
2467 }
2468 if (!cc.use_reg2) {
2469 cc.reg2 = tcg_const_tl(cc.imm);
f32d3781
PB
2470 }
2471
57eb0cc8
RH
2472 tcg_gen_movcond_tl(cc.cond, cpu_T[0], cc.reg, cc.reg2,
2473 cpu_T[0], cpu_regs[reg]);
2474 gen_op_mov_reg_T0(ot, reg);
2475
2476 if (cc.mask != -1) {
2477 tcg_temp_free(cc.reg);
2478 }
2479 if (!cc.use_reg2) {
2480 tcg_temp_free(cc.reg2);
2481 }
f32d3781
PB
2482}
2483
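/* A minimal illustrative sketch (not translator code): the CMOVcc
 * semantics implemented by the movcond above; the destination is always
 * written, only the value written depends on the condition.  Shown for
 * 32-bit operands in 32-bit mode; the helper name is hypothetical. */
static inline uint32_t cmov32_model(int cond, uint32_t src, uint32_t dst)
{
    return cond ? src : dst;
}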
3bd7da9e
FB
2484static inline void gen_op_movl_T0_seg(int seg_reg)
2485{
2486 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2487 offsetof(CPUX86State,segs[seg_reg].selector));
2488}
2489
2490static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2491{
2492 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2493 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2494 offsetof(CPUX86State,segs[seg_reg].selector));
2495 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2496 tcg_gen_st_tl(cpu_T[0], cpu_env,
2497 offsetof(CPUX86State,segs[seg_reg].base));
2498}
2499
2c0262af
FB
2500/* move T0 to seg_reg and compute if the CPU state may change. Never
2501 call this function with seg_reg == R_CS */
14ce26e7 2502static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2c0262af 2503{
3415a4dd
FB
2504 if (s->pe && !s->vm86) {
2505 /* XXX: optimize by finding processor state dynamically */
773cdfcc 2506 gen_update_cc_op(s);
14ce26e7 2507 gen_jmp_im(cur_eip);
b6abf97d 2508 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 2509 gen_helper_load_seg(cpu_env, tcg_const_i32(seg_reg), cpu_tmp2_i32);
dc196a57
FB
2510 /* abort translation because the addseg value may change or
2511 because ss32 may change. For R_SS, translation must always
 2512 stop as special handling must be done to disable hardware
2513 interrupts for the next instruction */
2514 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
5779406a 2515 s->is_jmp = DISAS_TB_JUMP;
3415a4dd 2516 } else {
3bd7da9e 2517 gen_op_movl_seg_T0_vm(seg_reg);
dc196a57 2518 if (seg_reg == R_SS)
5779406a 2519 s->is_jmp = DISAS_TB_JUMP;
3415a4dd 2520 }
2c0262af
FB
2521}
2522
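/* A minimal illustrative sketch (not translator code): in the vm86 /
 * real-mode path above (gen_op_movl_seg_T0_vm) the segment base comes
 * directly from the selector, with no descriptor table lookup.  The
 * helper name is hypothetical. */
static inline uint32_t seg_base_vm_model(uint16_t selector)
{
    return (uint32_t)selector << 4;
}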
0573fbfc
TS
2523static inline int svm_is_rep(int prefixes)
2524{
2525 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2526}
2527
872929aa 2528static inline void
0573fbfc 2529gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
b8b6a50b 2530 uint32_t type, uint64_t param)
0573fbfc 2531{
872929aa
FB
2532 /* no SVM activated; fast case */
2533 if (likely(!(s->flags & HF_SVMI_MASK)))
2534 return;
773cdfcc 2535 gen_update_cc_op(s);
872929aa 2536 gen_jmp_im(pc_start - s->cs_base);
052e80d5 2537 gen_helper_svm_check_intercept_param(cpu_env, tcg_const_i32(type),
a7812ae4 2538 tcg_const_i64(param));
0573fbfc
TS
2539}
2540
872929aa 2541static inline void
0573fbfc
TS
2542gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2543{
872929aa 2544 gen_svm_check_intercept_param(s, pc_start, type, 0);
0573fbfc
TS
2545}
2546
4f31916f
FB
2547static inline void gen_stack_update(DisasContext *s, int addend)
2548{
14ce26e7
FB
2549#ifdef TARGET_X86_64
2550 if (CODE64(s)) {
6e0d8677 2551 gen_op_add_reg_im(2, R_ESP, addend);
14ce26e7
FB
2552 } else
2553#endif
4f31916f 2554 if (s->ss32) {
6e0d8677 2555 gen_op_add_reg_im(1, R_ESP, addend);
4f31916f 2556 } else {
6e0d8677 2557 gen_op_add_reg_im(0, R_ESP, addend);
4f31916f
FB
2558 }
2559}
2560
2c0262af
FB
2561/* generate a push. It depends on ss32, addseg and dflag */
2562static void gen_push_T0(DisasContext *s)
2563{
14ce26e7
FB
2564#ifdef TARGET_X86_64
2565 if (CODE64(s)) {
57fec1fe 2566 gen_op_movq_A0_reg(R_ESP);
8f091a59 2567 if (s->dflag) {
57fec1fe
FB
2568 gen_op_addq_A0_im(-8);
2569 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
8f091a59 2570 } else {
57fec1fe
FB
2571 gen_op_addq_A0_im(-2);
2572 gen_op_st_T0_A0(OT_WORD + s->mem_index);
8f091a59 2573 }
57fec1fe 2574 gen_op_mov_reg_A0(2, R_ESP);
5fafdf24 2575 } else
14ce26e7
FB
2576#endif
2577 {
57fec1fe 2578 gen_op_movl_A0_reg(R_ESP);
14ce26e7 2579 if (!s->dflag)
57fec1fe 2580 gen_op_addl_A0_im(-2);
14ce26e7 2581 else
57fec1fe 2582 gen_op_addl_A0_im(-4);
14ce26e7
FB
2583 if (s->ss32) {
2584 if (s->addseg) {
bbf662ee 2585 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
7162ab21 2586 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2587 }
2588 } else {
2589 gen_op_andl_A0_ffff();
bbf662ee 2590 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
7162ab21 2591 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2592 }
57fec1fe 2593 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
14ce26e7 2594 if (s->ss32 && !s->addseg)
57fec1fe 2595 gen_op_mov_reg_A0(1, R_ESP);
14ce26e7 2596 else
57fec1fe 2597 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2c0262af
FB
2598 }
2599}
2600
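/* A minimal illustrative sketch (not translator code): what the code
 * generated above does for a 32-bit push on a flat 32-bit stack segment
 * (ss32 set, no addseg): decrement ESP, then store at SS:ESP.  The
 * helper name and the flat 'stack' pointer are illustrative assumptions. */
static inline void push32_model(uint32_t *esp, uint8_t *stack, uint32_t val)
{
    *esp -= 4;
    memcpy(stack + *esp, &val, sizeof(val));
}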
4f31916f
FB
2601/* generate a push. It depends on ss32, addseg and dflag */
2602/* slower version for T1, only used for call Ev */
2603static void gen_push_T1(DisasContext *s)
2c0262af 2604{
14ce26e7
FB
2605#ifdef TARGET_X86_64
2606 if (CODE64(s)) {
57fec1fe 2607 gen_op_movq_A0_reg(R_ESP);
8f091a59 2608 if (s->dflag) {
57fec1fe
FB
2609 gen_op_addq_A0_im(-8);
2610 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
8f091a59 2611 } else {
57fec1fe
FB
2612 gen_op_addq_A0_im(-2);
2613 gen_op_st_T0_A0(OT_WORD + s->mem_index);
8f091a59 2614 }
57fec1fe 2615 gen_op_mov_reg_A0(2, R_ESP);
5fafdf24 2616 } else
14ce26e7
FB
2617#endif
2618 {
57fec1fe 2619 gen_op_movl_A0_reg(R_ESP);
14ce26e7 2620 if (!s->dflag)
57fec1fe 2621 gen_op_addl_A0_im(-2);
14ce26e7 2622 else
57fec1fe 2623 gen_op_addl_A0_im(-4);
14ce26e7
FB
2624 if (s->ss32) {
2625 if (s->addseg) {
7162ab21 2626 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2627 }
2628 } else {
2629 gen_op_andl_A0_ffff();
7162ab21 2630 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2631 }
57fec1fe 2632 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
3b46e624 2633
14ce26e7 2634 if (s->ss32 && !s->addseg)
57fec1fe 2635 gen_op_mov_reg_A0(1, R_ESP);
14ce26e7
FB
2636 else
2637 gen_stack_update(s, (-2) << s->dflag);
2c0262af
FB
2638 }
2639}
2640
4f31916f
FB
2641/* two step pop is necessary for precise exceptions */
2642static void gen_pop_T0(DisasContext *s)
2c0262af 2643{
14ce26e7
FB
2644#ifdef TARGET_X86_64
2645 if (CODE64(s)) {
57fec1fe
FB
2646 gen_op_movq_A0_reg(R_ESP);
2647 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
5fafdf24 2648 } else
14ce26e7
FB
2649#endif
2650 {
57fec1fe 2651 gen_op_movl_A0_reg(R_ESP);
14ce26e7
FB
2652 if (s->ss32) {
2653 if (s->addseg)
7162ab21 2654 gen_op_addl_A0_seg(s, R_SS);
14ce26e7
FB
2655 } else {
2656 gen_op_andl_A0_ffff();
7162ab21 2657 gen_op_addl_A0_seg(s, R_SS);
14ce26e7 2658 }
57fec1fe 2659 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2c0262af
FB
2660 }
2661}
2662
2663static void gen_pop_update(DisasContext *s)
2664{
14ce26e7 2665#ifdef TARGET_X86_64
8f091a59 2666 if (CODE64(s) && s->dflag) {
14ce26e7
FB
2667 gen_stack_update(s, 8);
2668 } else
2669#endif
2670 {
2671 gen_stack_update(s, 2 << s->dflag);
2672 }
2c0262af
FB
2673}
2674
2675static void gen_stack_A0(DisasContext *s)
2676{
57fec1fe 2677 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2678 if (!s->ss32)
2679 gen_op_andl_A0_ffff();
bbf662ee 2680 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2c0262af 2681 if (s->addseg)
7162ab21 2682 gen_op_addl_A0_seg(s, R_SS);
2c0262af
FB
2683}
2684
2685/* NOTE: wrap around in 16 bit not fully handled */
2686static void gen_pusha(DisasContext *s)
2687{
2688 int i;
57fec1fe 2689 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2690 gen_op_addl_A0_im(-16 << s->dflag);
2691 if (!s->ss32)
2692 gen_op_andl_A0_ffff();
bbf662ee 2693 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2c0262af 2694 if (s->addseg)
7162ab21 2695 gen_op_addl_A0_seg(s, R_SS);
2c0262af 2696 for(i = 0;i < 8; i++) {
57fec1fe
FB
2697 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2698 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2c0262af
FB
2699 gen_op_addl_A0_im(2 << s->dflag);
2700 }
57fec1fe 2701 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af
FB
2702}
2703
2704/* NOTE: wrap around in 16 bit not fully handled */
2705static void gen_popa(DisasContext *s)
2706{
2707 int i;
57fec1fe 2708 gen_op_movl_A0_reg(R_ESP);
2c0262af
FB
2709 if (!s->ss32)
2710 gen_op_andl_A0_ffff();
bbf662ee
FB
2711 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2712 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2c0262af 2713 if (s->addseg)
7162ab21 2714 gen_op_addl_A0_seg(s, R_SS);
2c0262af
FB
2715 for(i = 0;i < 8; i++) {
2716 /* ESP is not reloaded */
2717 if (i != 3) {
57fec1fe
FB
2718 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2719 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2c0262af
FB
2720 }
2721 gen_op_addl_A0_im(2 << s->dflag);
2722 }
57fec1fe 2723 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af
FB
2724}
2725
2c0262af
FB
2726static void gen_enter(DisasContext *s, int esp_addend, int level)
2727{
61a8c4ec 2728 int ot, opsize;
2c0262af 2729
2c0262af 2730 level &= 0x1f;
8f091a59
FB
2731#ifdef TARGET_X86_64
2732 if (CODE64(s)) {
2733 ot = s->dflag ? OT_QUAD : OT_WORD;
2734 opsize = 1 << ot;
3b46e624 2735
57fec1fe 2736 gen_op_movl_A0_reg(R_ESP);
8f091a59 2737 gen_op_addq_A0_im(-opsize);
bbf662ee 2738 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
8f091a59
FB
2739
2740 /* push bp */
57fec1fe
FB
2741 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2742 gen_op_st_T0_A0(ot + s->mem_index);
8f091a59 2743 if (level) {
b5b38f61 2744 /* XXX: must save state */
2999a0b2 2745 gen_helper_enter64_level(cpu_env, tcg_const_i32(level),
a7812ae4
PB
2746 tcg_const_i32((ot == OT_QUAD)),
2747 cpu_T[1]);
8f091a59 2748 }
57fec1fe 2749 gen_op_mov_reg_T1(ot, R_EBP);
bbf662ee 2750 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
57fec1fe 2751 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
5fafdf24 2752 } else
8f091a59
FB
2753#endif
2754 {
2755 ot = s->dflag + OT_WORD;
2756 opsize = 2 << s->dflag;
3b46e624 2757
57fec1fe 2758 gen_op_movl_A0_reg(R_ESP);
8f091a59
FB
2759 gen_op_addl_A0_im(-opsize);
2760 if (!s->ss32)
2761 gen_op_andl_A0_ffff();
bbf662ee 2762 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
8f091a59 2763 if (s->addseg)
7162ab21 2764 gen_op_addl_A0_seg(s, R_SS);
8f091a59 2765 /* push bp */
57fec1fe
FB
2766 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2767 gen_op_st_T0_A0(ot + s->mem_index);
8f091a59 2768 if (level) {
b5b38f61 2769 /* XXX: must save state */
2999a0b2 2770 gen_helper_enter_level(cpu_env, tcg_const_i32(level),
a7812ae4
PB
2771 tcg_const_i32(s->dflag),
2772 cpu_T[1]);
8f091a59 2773 }
57fec1fe 2774 gen_op_mov_reg_T1(ot, R_EBP);
bbf662ee 2775 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
57fec1fe 2776 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2c0262af 2777 }
2c0262af
FB
2778}
2779
14ce26e7 2780static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2c0262af 2781{
773cdfcc 2782 gen_update_cc_op(s);
14ce26e7 2783 gen_jmp_im(cur_eip);
77b2bc2c 2784 gen_helper_raise_exception(cpu_env, tcg_const_i32(trapno));
5779406a 2785 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2786}
2787
2788/* an interrupt is different from an exception because of the
7f75ffd3 2789 privilege checks */
5fafdf24 2790static void gen_interrupt(DisasContext *s, int intno,
14ce26e7 2791 target_ulong cur_eip, target_ulong next_eip)
2c0262af 2792{
773cdfcc 2793 gen_update_cc_op(s);
14ce26e7 2794 gen_jmp_im(cur_eip);
77b2bc2c 2795 gen_helper_raise_interrupt(cpu_env, tcg_const_i32(intno),
a7812ae4 2796 tcg_const_i32(next_eip - cur_eip));
5779406a 2797 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2798}
2799
14ce26e7 2800static void gen_debug(DisasContext *s, target_ulong cur_eip)
2c0262af 2801{
773cdfcc 2802 gen_update_cc_op(s);
14ce26e7 2803 gen_jmp_im(cur_eip);
4a7443be 2804 gen_helper_debug(cpu_env);
5779406a 2805 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2806}
2807
2808/* generate a generic end of block. Trace exception is also generated
2809 if needed */
2810static void gen_eob(DisasContext *s)
2811{
773cdfcc 2812 gen_update_cc_op(s);
a2cc3b24 2813 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
f0967a1a 2814 gen_helper_reset_inhibit_irq(cpu_env);
a2cc3b24 2815 }
a2397807 2816 if (s->tb->flags & HF_RF_MASK) {
f0967a1a 2817 gen_helper_reset_rf(cpu_env);
a2397807 2818 }
34865134 2819 if (s->singlestep_enabled) {
4a7443be 2820 gen_helper_debug(cpu_env);
34865134 2821 } else if (s->tf) {
4a7443be 2822 gen_helper_single_step(cpu_env);
2c0262af 2823 } else {
57fec1fe 2824 tcg_gen_exit_tb(0);
2c0262af 2825 }
5779406a 2826 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
2827}
2828
2829/* generate a jump to eip. No segment change must happen before as a
2830 direct call to the next block may occur */
14ce26e7 2831static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2c0262af 2832{
a3251186
RH
2833 gen_update_cc_op(s);
2834 set_cc_op(s, CC_OP_DYNAMIC);
2c0262af 2835 if (s->jmp_opt) {
6e256c93 2836 gen_goto_tb(s, tb_num, eip);
5779406a 2837 s->is_jmp = DISAS_TB_JUMP;
2c0262af 2838 } else {
14ce26e7 2839 gen_jmp_im(eip);
2c0262af
FB
2840 gen_eob(s);
2841 }
2842}
2843
14ce26e7
FB
2844static void gen_jmp(DisasContext *s, target_ulong eip)
2845{
2846 gen_jmp_tb(s, eip, 0);
2847}
2848
8686c490
FB
2849static inline void gen_ldq_env_A0(int idx, int offset)
2850{
2851 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2852 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2853 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
8686c490 2854}
664e0f19 2855
8686c490
FB
2856static inline void gen_stq_env_A0(int idx, int offset)
2857{
2858 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2859 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2860 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
8686c490 2861}
664e0f19 2862
8686c490
FB
2863static inline void gen_ldo_env_A0(int idx, int offset)
2864{
2865 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2866 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2867 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
8686c490 2868 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
b6abf97d
FB
2869 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2870 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
8686c490 2871}
14ce26e7 2872
8686c490
FB
2873static inline void gen_sto_env_A0(int idx, int offset)
2874{
2875 int mem_index = (idx >> 2) - 1;
b6abf97d
FB
2876 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2877 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
8686c490 2878 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
b6abf97d
FB
2879 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2880 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
8686c490 2881}
14ce26e7 2882
5af45186
FB
2883static inline void gen_op_movo(int d_offset, int s_offset)
2884{
b6abf97d
FB
2885 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2886 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2887 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2888 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
5af45186
FB
2889}
2890
2891static inline void gen_op_movq(int d_offset, int s_offset)
2892{
b6abf97d
FB
2893 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2894 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
5af45186
FB
2895}
2896
2897static inline void gen_op_movl(int d_offset, int s_offset)
2898{
b6abf97d
FB
2899 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2900 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
5af45186
FB
2901}
2902
2903static inline void gen_op_movq_env_0(int d_offset)
2904{
b6abf97d
FB
2905 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2906 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
5af45186 2907}
664e0f19 2908
d3eb5eae
BS
2909typedef void (*SSEFunc_i_ep)(TCGv_i32 val, TCGv_ptr env, TCGv_ptr reg);
2910typedef void (*SSEFunc_l_ep)(TCGv_i64 val, TCGv_ptr env, TCGv_ptr reg);
2911typedef void (*SSEFunc_0_epi)(TCGv_ptr env, TCGv_ptr reg, TCGv_i32 val);
2912typedef void (*SSEFunc_0_epl)(TCGv_ptr env, TCGv_ptr reg, TCGv_i64 val);
2913typedef void (*SSEFunc_0_epp)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b);
2914typedef void (*SSEFunc_0_eppi)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b,
2915 TCGv_i32 val);
c4baa050 2916typedef void (*SSEFunc_0_ppi)(TCGv_ptr reg_a, TCGv_ptr reg_b, TCGv_i32 val);
d3eb5eae
BS
2917typedef void (*SSEFunc_0_eppt)(TCGv_ptr env, TCGv_ptr reg_a, TCGv_ptr reg_b,
2918 TCGv val);
c4baa050 2919
5af45186
FB
2920#define SSE_SPECIAL ((void *)1)
2921#define SSE_DUMMY ((void *)2)
664e0f19 2922
a7812ae4
PB
2923#define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2924#define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2925 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
5af45186 2926
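/* sse_op_table1 is indexed by the second opcode byte and then by the
   mandatory prefix: [0] none, [1] 0x66 (PREFIX_DATA), [2] 0xF3
   (PREFIX_REPZ), [3] 0xF2 (PREFIX_REPNZ); see the b1 computation in
   gen_sse(). */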
d3eb5eae 2927static const SSEFunc_0_epp sse_op_table1[256][4] = {
a35f3ec7
AJ
2928 /* 3DNow! extensions */
2929 [0x0e] = { SSE_DUMMY }, /* femms */
2930 [0x0f] = { SSE_DUMMY }, /* pf... */
664e0f19
FB
2931 /* pure SSE operations */
2932 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2933 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
465e9838 2934 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
664e0f19 2935 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
a7812ae4
PB
2936 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2937 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
664e0f19
FB
2938 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2939 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2940
2941 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2942 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2943 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
d9f4bb27 2944 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd, movntss, movntsd */
664e0f19
FB
2945 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2946 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
a7812ae4
PB
2947 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2948 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
664e0f19
FB
2949 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2950 [0x51] = SSE_FOP(sqrt),
a7812ae4
PB
2951 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2952 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2953 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2954 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2955 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2956 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
664e0f19
FB
2957 [0x58] = SSE_FOP(add),
2958 [0x59] = SSE_FOP(mul),
a7812ae4
PB
2959 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2960 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2961 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
664e0f19
FB
2962 [0x5c] = SSE_FOP(sub),
2963 [0x5d] = SSE_FOP(min),
2964 [0x5e] = SSE_FOP(div),
2965 [0x5f] = SSE_FOP(max),
2966
2967 [0xc2] = SSE_FOP(cmpeq),
d3eb5eae
BS
2968 [0xc6] = { (SSEFunc_0_epp)gen_helper_shufps,
2969 (SSEFunc_0_epp)gen_helper_shufpd }, /* XXX: casts */
664e0f19 2970
7073fbad
RH
2971 /* SSSE3, SSE4, MOVBE, CRC32, BMI1, BMI2, ADX. */
2972 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2973 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
4242b1bd 2974
664e0f19
FB
2975 /* MMX ops and their SSE extensions */
2976 [0x60] = MMX_OP2(punpcklbw),
2977 [0x61] = MMX_OP2(punpcklwd),
2978 [0x62] = MMX_OP2(punpckldq),
2979 [0x63] = MMX_OP2(packsswb),
2980 [0x64] = MMX_OP2(pcmpgtb),
2981 [0x65] = MMX_OP2(pcmpgtw),
2982 [0x66] = MMX_OP2(pcmpgtl),
2983 [0x67] = MMX_OP2(packuswb),
2984 [0x68] = MMX_OP2(punpckhbw),
2985 [0x69] = MMX_OP2(punpckhwd),
2986 [0x6a] = MMX_OP2(punpckhdq),
2987 [0x6b] = MMX_OP2(packssdw),
a7812ae4
PB
2988 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2989 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
664e0f19
FB
2990 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
 2991 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
d3eb5eae
BS
2992 [0x70] = { (SSEFunc_0_epp)gen_helper_pshufw_mmx,
2993 (SSEFunc_0_epp)gen_helper_pshufd_xmm,
2994 (SSEFunc_0_epp)gen_helper_pshufhw_xmm,
2995 (SSEFunc_0_epp)gen_helper_pshuflw_xmm }, /* XXX: casts */
664e0f19
FB
2996 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2997 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2998 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2999 [0x74] = MMX_OP2(pcmpeqb),
3000 [0x75] = MMX_OP2(pcmpeqw),
3001 [0x76] = MMX_OP2(pcmpeql),
a35f3ec7 3002 [0x77] = { SSE_DUMMY }, /* emms */
d9f4bb27
AP
3003 [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
3004 [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
a7812ae4
PB
3005 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
3006 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
664e0f19
FB
 3007 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3008 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3009 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3010 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
a7812ae4 3011 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
664e0f19
FB
3012 [0xd1] = MMX_OP2(psrlw),
3013 [0xd2] = MMX_OP2(psrld),
3014 [0xd3] = MMX_OP2(psrlq),
3015 [0xd4] = MMX_OP2(paddq),
3016 [0xd5] = MMX_OP2(pmullw),
3017 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3018 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3019 [0xd8] = MMX_OP2(psubusb),
3020 [0xd9] = MMX_OP2(psubusw),
3021 [0xda] = MMX_OP2(pminub),
3022 [0xdb] = MMX_OP2(pand),
3023 [0xdc] = MMX_OP2(paddusb),
3024 [0xdd] = MMX_OP2(paddusw),
3025 [0xde] = MMX_OP2(pmaxub),
3026 [0xdf] = MMX_OP2(pandn),
3027 [0xe0] = MMX_OP2(pavgb),
3028 [0xe1] = MMX_OP2(psraw),
3029 [0xe2] = MMX_OP2(psrad),
3030 [0xe3] = MMX_OP2(pavgw),
3031 [0xe4] = MMX_OP2(pmulhuw),
3032 [0xe5] = MMX_OP2(pmulhw),
a7812ae4 3033 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
664e0f19
FB
 3034 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3035 [0xe8] = MMX_OP2(psubsb),
3036 [0xe9] = MMX_OP2(psubsw),
3037 [0xea] = MMX_OP2(pminsw),
3038 [0xeb] = MMX_OP2(por),
3039 [0xec] = MMX_OP2(paddsb),
3040 [0xed] = MMX_OP2(paddsw),
3041 [0xee] = MMX_OP2(pmaxsw),
3042 [0xef] = MMX_OP2(pxor),
465e9838 3043 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
664e0f19
FB
3044 [0xf1] = MMX_OP2(psllw),
3045 [0xf2] = MMX_OP2(pslld),
3046 [0xf3] = MMX_OP2(psllq),
3047 [0xf4] = MMX_OP2(pmuludq),
3048 [0xf5] = MMX_OP2(pmaddwd),
3049 [0xf6] = MMX_OP2(psadbw),
d3eb5eae
BS
3050 [0xf7] = { (SSEFunc_0_epp)gen_helper_maskmov_mmx,
3051 (SSEFunc_0_epp)gen_helper_maskmov_xmm }, /* XXX: casts */
664e0f19
FB
3052 [0xf8] = MMX_OP2(psubb),
3053 [0xf9] = MMX_OP2(psubw),
3054 [0xfa] = MMX_OP2(psubl),
3055 [0xfb] = MMX_OP2(psubq),
3056 [0xfc] = MMX_OP2(paddb),
3057 [0xfd] = MMX_OP2(paddw),
3058 [0xfe] = MMX_OP2(paddl),
3059};
3060
d3eb5eae 3061static const SSEFunc_0_epp sse_op_table2[3 * 8][2] = {
664e0f19
FB
3062 [0 + 2] = MMX_OP2(psrlw),
3063 [0 + 4] = MMX_OP2(psraw),
3064 [0 + 6] = MMX_OP2(psllw),
3065 [8 + 2] = MMX_OP2(psrld),
3066 [8 + 4] = MMX_OP2(psrad),
3067 [8 + 6] = MMX_OP2(pslld),
3068 [16 + 2] = MMX_OP2(psrlq),
a7812ae4 3069 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
664e0f19 3070 [16 + 6] = MMX_OP2(psllq),
a7812ae4 3071 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
664e0f19
FB
3072};
3073
d3eb5eae 3074static const SSEFunc_0_epi sse_op_table3ai[] = {
a7812ae4 3075 gen_helper_cvtsi2ss,
11f8cdbc 3076 gen_helper_cvtsi2sd
c4baa050 3077};
a7812ae4 3078
11f8cdbc 3079#ifdef TARGET_X86_64
d3eb5eae 3080static const SSEFunc_0_epl sse_op_table3aq[] = {
11f8cdbc
SW
3081 gen_helper_cvtsq2ss,
3082 gen_helper_cvtsq2sd
3083};
3084#endif
3085
d3eb5eae 3086static const SSEFunc_i_ep sse_op_table3bi[] = {
a7812ae4 3087 gen_helper_cvttss2si,
a7812ae4 3088 gen_helper_cvtss2si,
bedc2ac1 3089 gen_helper_cvttsd2si,
11f8cdbc 3090 gen_helper_cvtsd2si
664e0f19 3091};
3b46e624 3092
11f8cdbc 3093#ifdef TARGET_X86_64
d3eb5eae 3094static const SSEFunc_l_ep sse_op_table3bq[] = {
11f8cdbc 3095 gen_helper_cvttss2sq,
11f8cdbc 3096 gen_helper_cvtss2sq,
bedc2ac1 3097 gen_helper_cvttsd2sq,
11f8cdbc
SW
3098 gen_helper_cvtsd2sq
3099};
3100#endif
3101
d3eb5eae 3102static const SSEFunc_0_epp sse_op_table4[8][4] = {
664e0f19
FB
3103 SSE_FOP(cmpeq),
3104 SSE_FOP(cmplt),
3105 SSE_FOP(cmple),
3106 SSE_FOP(cmpunord),
3107 SSE_FOP(cmpneq),
3108 SSE_FOP(cmpnlt),
3109 SSE_FOP(cmpnle),
3110 SSE_FOP(cmpord),
3111};
3b46e624 3112
d3eb5eae 3113static const SSEFunc_0_epp sse_op_table5[256] = {
a7812ae4
PB
3114 [0x0c] = gen_helper_pi2fw,
3115 [0x0d] = gen_helper_pi2fd,
3116 [0x1c] = gen_helper_pf2iw,
3117 [0x1d] = gen_helper_pf2id,
3118 [0x8a] = gen_helper_pfnacc,
3119 [0x8e] = gen_helper_pfpnacc,
3120 [0x90] = gen_helper_pfcmpge,
3121 [0x94] = gen_helper_pfmin,
3122 [0x96] = gen_helper_pfrcp,
3123 [0x97] = gen_helper_pfrsqrt,
3124 [0x9a] = gen_helper_pfsub,
3125 [0x9e] = gen_helper_pfadd,
3126 [0xa0] = gen_helper_pfcmpgt,
3127 [0xa4] = gen_helper_pfmax,
3128 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
3129 [0xa7] = gen_helper_movq, /* pfrsqit1 */
3130 [0xaa] = gen_helper_pfsubr,
3131 [0xae] = gen_helper_pfacc,
3132 [0xb0] = gen_helper_pfcmpeq,
3133 [0xb4] = gen_helper_pfmul,
3134 [0xb6] = gen_helper_movq, /* pfrcpit2 */
3135 [0xb7] = gen_helper_pmulhrw_mmx,
3136 [0xbb] = gen_helper_pswapd,
3137 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
a35f3ec7
AJ
3138};
3139
d3eb5eae
BS
3140struct SSEOpHelper_epp {
3141 SSEFunc_0_epp op[2];
c4baa050
BS
3142 uint32_t ext_mask;
3143};
3144
d3eb5eae
BS
3145struct SSEOpHelper_eppi {
3146 SSEFunc_0_eppi op[2];
c4baa050 3147 uint32_t ext_mask;
222a3336 3148};
c4baa050 3149
222a3336 3150#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
a7812ae4
PB
3151#define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3152#define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
222a3336 3153#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
e71827bc
AJ
3154#define PCLMULQDQ_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, \
3155 CPUID_EXT_PCLMULQDQ }
d640045a 3156#define AESNI_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_AES }
c4baa050 3157
d3eb5eae 3158static const struct SSEOpHelper_epp sse_op_table6[256] = {
222a3336
AZ
3159 [0x00] = SSSE3_OP(pshufb),
3160 [0x01] = SSSE3_OP(phaddw),
3161 [0x02] = SSSE3_OP(phaddd),
3162 [0x03] = SSSE3_OP(phaddsw),
3163 [0x04] = SSSE3_OP(pmaddubsw),
3164 [0x05] = SSSE3_OP(phsubw),
3165 [0x06] = SSSE3_OP(phsubd),
3166 [0x07] = SSSE3_OP(phsubsw),
3167 [0x08] = SSSE3_OP(psignb),
3168 [0x09] = SSSE3_OP(psignw),
3169 [0x0a] = SSSE3_OP(psignd),
3170 [0x0b] = SSSE3_OP(pmulhrsw),
3171 [0x10] = SSE41_OP(pblendvb),
3172 [0x14] = SSE41_OP(blendvps),
3173 [0x15] = SSE41_OP(blendvpd),
3174 [0x17] = SSE41_OP(ptest),
3175 [0x1c] = SSSE3_OP(pabsb),
3176 [0x1d] = SSSE3_OP(pabsw),
3177 [0x1e] = SSSE3_OP(pabsd),
3178 [0x20] = SSE41_OP(pmovsxbw),
3179 [0x21] = SSE41_OP(pmovsxbd),
3180 [0x22] = SSE41_OP(pmovsxbq),
3181 [0x23] = SSE41_OP(pmovsxwd),
3182 [0x24] = SSE41_OP(pmovsxwq),
3183 [0x25] = SSE41_OP(pmovsxdq),
3184 [0x28] = SSE41_OP(pmuldq),
3185 [0x29] = SSE41_OP(pcmpeqq),
 3186 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3187 [0x2b] = SSE41_OP(packusdw),
3188 [0x30] = SSE41_OP(pmovzxbw),
3189 [0x31] = SSE41_OP(pmovzxbd),
3190 [0x32] = SSE41_OP(pmovzxbq),
3191 [0x33] = SSE41_OP(pmovzxwd),
3192 [0x34] = SSE41_OP(pmovzxwq),
3193 [0x35] = SSE41_OP(pmovzxdq),
3194 [0x37] = SSE42_OP(pcmpgtq),
3195 [0x38] = SSE41_OP(pminsb),
3196 [0x39] = SSE41_OP(pminsd),
3197 [0x3a] = SSE41_OP(pminuw),
3198 [0x3b] = SSE41_OP(pminud),
3199 [0x3c] = SSE41_OP(pmaxsb),
3200 [0x3d] = SSE41_OP(pmaxsd),
3201 [0x3e] = SSE41_OP(pmaxuw),
3202 [0x3f] = SSE41_OP(pmaxud),
3203 [0x40] = SSE41_OP(pmulld),
3204 [0x41] = SSE41_OP(phminposuw),
d640045a
AJ
3205 [0xdb] = AESNI_OP(aesimc),
3206 [0xdc] = AESNI_OP(aesenc),
3207 [0xdd] = AESNI_OP(aesenclast),
3208 [0xde] = AESNI_OP(aesdec),
3209 [0xdf] = AESNI_OP(aesdeclast),
4242b1bd
AZ
3210};
3211
d3eb5eae 3212static const struct SSEOpHelper_eppi sse_op_table7[256] = {
222a3336
AZ
3213 [0x08] = SSE41_OP(roundps),
3214 [0x09] = SSE41_OP(roundpd),
3215 [0x0a] = SSE41_OP(roundss),
3216 [0x0b] = SSE41_OP(roundsd),
3217 [0x0c] = SSE41_OP(blendps),
3218 [0x0d] = SSE41_OP(blendpd),
3219 [0x0e] = SSE41_OP(pblendw),
3220 [0x0f] = SSSE3_OP(palignr),
3221 [0x14] = SSE41_SPECIAL, /* pextrb */
3222 [0x15] = SSE41_SPECIAL, /* pextrw */
3223 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3224 [0x17] = SSE41_SPECIAL, /* extractps */
3225 [0x20] = SSE41_SPECIAL, /* pinsrb */
3226 [0x21] = SSE41_SPECIAL, /* insertps */
3227 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3228 [0x40] = SSE41_OP(dpps),
3229 [0x41] = SSE41_OP(dppd),
3230 [0x42] = SSE41_OP(mpsadbw),
e71827bc 3231 [0x44] = PCLMULQDQ_OP(pclmulqdq),
222a3336
AZ
3232 [0x60] = SSE42_OP(pcmpestrm),
3233 [0x61] = SSE42_OP(pcmpestri),
3234 [0x62] = SSE42_OP(pcmpistrm),
3235 [0x63] = SSE42_OP(pcmpistri),
d640045a 3236 [0xdf] = AESNI_OP(aeskeygenassist),
4242b1bd
AZ
3237};
3238
0af10c86
BS
3239static void gen_sse(CPUX86State *env, DisasContext *s, int b,
3240 target_ulong pc_start, int rex_r)
664e0f19
FB
3241{
3242 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3243 int modrm, mod, rm, reg, reg_addr, offset_addr;
d3eb5eae
BS
3244 SSEFunc_0_epp sse_fn_epp;
3245 SSEFunc_0_eppi sse_fn_eppi;
c4baa050 3246 SSEFunc_0_ppi sse_fn_ppi;
d3eb5eae 3247 SSEFunc_0_eppt sse_fn_eppt;
664e0f19
FB
3248
3249 b &= 0xff;
5fafdf24 3250 if (s->prefix & PREFIX_DATA)
664e0f19 3251 b1 = 1;
5fafdf24 3252 else if (s->prefix & PREFIX_REPZ)
664e0f19 3253 b1 = 2;
5fafdf24 3254 else if (s->prefix & PREFIX_REPNZ)
664e0f19
FB
3255 b1 = 3;
3256 else
3257 b1 = 0;
d3eb5eae
BS
3258 sse_fn_epp = sse_op_table1[b][b1];
3259 if (!sse_fn_epp) {
664e0f19 3260 goto illegal_op;
c4baa050 3261 }
a35f3ec7 3262 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
664e0f19
FB
3263 is_xmm = 1;
3264 } else {
3265 if (b1 == 0) {
3266 /* MMX case */
3267 is_xmm = 0;
3268 } else {
3269 is_xmm = 1;
3270 }
3271 }
3272 /* simple MMX/SSE operation */
3273 if (s->flags & HF_TS_MASK) {
3274 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3275 return;
3276 }
3277 if (s->flags & HF_EM_MASK) {
3278 illegal_op:
3279 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3280 return;
3281 }
3282 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
4242b1bd
AZ
3283 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3284 goto illegal_op;
e771edab
AJ
3285 if (b == 0x0e) {
3286 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3287 goto illegal_op;
3288 /* femms */
d3eb5eae 3289 gen_helper_emms(cpu_env);
e771edab
AJ
3290 return;
3291 }
3292 if (b == 0x77) {
3293 /* emms */
d3eb5eae 3294 gen_helper_emms(cpu_env);
664e0f19
FB
3295 return;
3296 }
3297 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3298 the static cpu state) */
3299 if (!is_xmm) {
d3eb5eae 3300 gen_helper_enter_mmx(cpu_env);
664e0f19
FB
3301 }
3302
0af10c86 3303 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3304 reg = ((modrm >> 3) & 7);
3305 if (is_xmm)
3306 reg |= rex_r;
3307 mod = (modrm >> 6) & 3;
d3eb5eae 3308 if (sse_fn_epp == SSE_SPECIAL) {
664e0f19
FB
3309 b |= (b1 << 8);
3310 switch(b) {
3311 case 0x0e7: /* movntq */
5fafdf24 3312 if (mod == 3)
664e0f19 3313 goto illegal_op;
0af10c86 3314 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3315 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3316 break;
3317 case 0x1e7: /* movntdq */
3318 case 0x02b: /* movntps */
3319 case 0x12b: /* movntps */
2e21e749
T
3320 if (mod == 3)
3321 goto illegal_op;
0af10c86 3322 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2e21e749
T
3323 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3324 break;
465e9838
FB
3325 case 0x3f0: /* lddqu */
3326 if (mod == 3)
664e0f19 3327 goto illegal_op;
0af10c86 3328 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
c2254920 3329 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19 3330 break;
d9f4bb27
AP
3331 case 0x22b: /* movntss */
3332 case 0x32b: /* movntsd */
3333 if (mod == 3)
3334 goto illegal_op;
0af10c86 3335 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
d9f4bb27
AP
3336 if (b1 & 1) {
3337 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,
3338 xmm_regs[reg]));
3339 } else {
3340 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3341 xmm_regs[reg].XMM_L(0)));
3342 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3343 }
3344 break;
664e0f19 3345 case 0x6e: /* movd mm, ea */
dabd98dd
FB
3346#ifdef TARGET_X86_64
3347 if (s->dflag == 2) {
0af10c86 3348 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 0);
5af45186 3349 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
5fafdf24 3350 } else
dabd98dd
FB
3351#endif
3352 {
0af10c86 3353 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 0);
5af45186
FB
3354 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3355 offsetof(CPUX86State,fpregs[reg].mmx));
a7812ae4
PB
3356 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3357 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
dabd98dd 3358 }
664e0f19
FB
3359 break;
3360 case 0x16e: /* movd xmm, ea */
dabd98dd
FB
3361#ifdef TARGET_X86_64
3362 if (s->dflag == 2) {
0af10c86 3363 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 0);
5af45186
FB
3364 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3365 offsetof(CPUX86State,xmm_regs[reg]));
a7812ae4 3366 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
5fafdf24 3367 } else
dabd98dd
FB
3368#endif
3369 {
0af10c86 3370 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 0);
5af45186
FB
3371 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3372 offsetof(CPUX86State,xmm_regs[reg]));
b6abf97d 3373 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 3374 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
dabd98dd 3375 }
664e0f19
FB
3376 break;
3377 case 0x6f: /* movq mm, ea */
3378 if (mod != 3) {
0af10c86 3379 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3380 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3381 } else {
3382 rm = (modrm & 7);
b6abf97d 3383 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
5af45186 3384 offsetof(CPUX86State,fpregs[rm].mmx));
b6abf97d 3385 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
5af45186 3386 offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3387 }
3388 break;
3389 case 0x010: /* movups */
3390 case 0x110: /* movupd */
3391 case 0x028: /* movaps */
3392 case 0x128: /* movapd */
3393 case 0x16f: /* movdqa xmm, ea */
3394 case 0x26f: /* movdqu xmm, ea */
3395 if (mod != 3) {
0af10c86 3396 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3397 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3398 } else {
3399 rm = (modrm & 7) | REX_B(s);
3400 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3401 offsetof(CPUX86State,xmm_regs[rm]));
3402 }
3403 break;
3404 case 0x210: /* movss xmm, ea */
3405 if (mod != 3) {
0af10c86 3406 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 3407 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 3408 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
664e0f19 3409 gen_op_movl_T0_0();
651ba608
FB
3410 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3411 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3412 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
664e0f19
FB
3413 } else {
3414 rm = (modrm & 7) | REX_B(s);
3415 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3416 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3417 }
3418 break;
3419 case 0x310: /* movsd xmm, ea */
3420 if (mod != 3) {
0af10c86 3421 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3422 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19 3423 gen_op_movl_T0_0();
651ba608
FB
3424 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3425 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
664e0f19
FB
3426 } else {
3427 rm = (modrm & 7) | REX_B(s);
3428 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3429 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3430 }
3431 break;
3432 case 0x012: /* movlps */
3433 case 0x112: /* movlpd */
3434 if (mod != 3) {
0af10c86 3435 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3436 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3437 } else {
3438 /* movhlps */
3439 rm = (modrm & 7) | REX_B(s);
3440 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3441 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3442 }
3443 break;
465e9838
FB
3444 case 0x212: /* movsldup */
3445 if (mod != 3) {
0af10c86 3446 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3447 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
465e9838
FB
3448 } else {
3449 rm = (modrm & 7) | REX_B(s);
3450 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3451 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3452 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3453 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3454 }
3455 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3456 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3457 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3458 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3459 break;
3460 case 0x312: /* movddup */
3461 if (mod != 3) {
0af10c86 3462 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3463 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
465e9838
FB
3464 } else {
3465 rm = (modrm & 7) | REX_B(s);
3466 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3467 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3468 }
3469 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
ba6526df 3470 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
465e9838 3471 break;
664e0f19
FB
3472 case 0x016: /* movhps */
3473 case 0x116: /* movhpd */
3474 if (mod != 3) {
0af10c86 3475 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3476 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3477 } else {
3478 /* movlhps */
3479 rm = (modrm & 7) | REX_B(s);
3480 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3481 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3482 }
3483 break;
3484 case 0x216: /* movshdup */
3485 if (mod != 3) {
0af10c86 3486 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3487 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3488 } else {
3489 rm = (modrm & 7) | REX_B(s);
3490 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3491 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3492 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3493 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3494 }
3495 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3496 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3497 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3498 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3499 break;
d9f4bb27
AP
3500 case 0x178:
3501 case 0x378:
3502 {
3503 int bit_index, field_length;
3504
3505 if (b1 == 1 && reg != 0)
3506 goto illegal_op;
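 /* SSE4a extrq/insertq immediate forms (66/f2 0f 78): the extrq form
    requires modrm.reg == 0, and the two immediate bytes that follow
    give the field length and then the bit index, each modulo 64. */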
0af10c86
BS
3507 field_length = cpu_ldub_code(env, s->pc++) & 0x3F;
3508 bit_index = cpu_ldub_code(env, s->pc++) & 0x3F;
d9f4bb27
AP
3509 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3510 offsetof(CPUX86State,xmm_regs[reg]));
3511 if (b1 == 1)
d3eb5eae
BS
3512 gen_helper_extrq_i(cpu_env, cpu_ptr0,
3513 tcg_const_i32(bit_index),
3514 tcg_const_i32(field_length));
d9f4bb27 3515 else
d3eb5eae
BS
3516 gen_helper_insertq_i(cpu_env, cpu_ptr0,
3517 tcg_const_i32(bit_index),
3518 tcg_const_i32(field_length));
d9f4bb27
AP
3519 }
3520 break;
664e0f19 3521 case 0x7e: /* movd ea, mm */
dabd98dd
FB
3522#ifdef TARGET_X86_64
3523 if (s->dflag == 2) {
5af45186
FB
3524 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3525 offsetof(CPUX86State,fpregs[reg].mmx));
0af10c86 3526 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 1);
5fafdf24 3527 } else
dabd98dd
FB
3528#endif
3529 {
5af45186
FB
3530 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3531 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
0af10c86 3532 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 1);
dabd98dd 3533 }
664e0f19
FB
3534 break;
3535 case 0x17e: /* movd ea, xmm */
dabd98dd
FB
3536#ifdef TARGET_X86_64
3537 if (s->dflag == 2) {
5af45186
FB
3538 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3539 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
0af10c86 3540 gen_ldst_modrm(env, s, modrm, OT_QUAD, OR_TMP0, 1);
5fafdf24 3541 } else
dabd98dd
FB
3542#endif
3543 {
5af45186
FB
3544 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3545 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
0af10c86 3546 gen_ldst_modrm(env, s, modrm, OT_LONG, OR_TMP0, 1);
dabd98dd 3547 }
664e0f19
FB
3548 break;
3549 case 0x27e: /* movq xmm, ea */
3550 if (mod != 3) {
0af10c86 3551 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3552 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3553 } else {
3554 rm = (modrm & 7) | REX_B(s);
3555 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3556 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3557 }
3558 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3559 break;
3560 case 0x7f: /* movq ea, mm */
3561 if (mod != 3) {
0af10c86 3562 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3563 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
664e0f19
FB
3564 } else {
3565 rm = (modrm & 7);
3566 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3567 offsetof(CPUX86State,fpregs[reg].mmx));
3568 }
3569 break;
3570 case 0x011: /* movups */
3571 case 0x111: /* movupd */
3572 case 0x029: /* movaps */
3573 case 0x129: /* movapd */
3574 case 0x17f: /* movdqa ea, xmm */
3575 case 0x27f: /* movdqu ea, xmm */
3576 if (mod != 3) {
0af10c86 3577 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3578 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
664e0f19
FB
3579 } else {
3580 rm = (modrm & 7) | REX_B(s);
3581 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3582 offsetof(CPUX86State,xmm_regs[reg]));
3583 }
3584 break;
3585 case 0x211: /* movss ea, xmm */
3586 if (mod != 3) {
0af10c86 3587 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 3588 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
57fec1fe 3589 gen_op_st_T0_A0(OT_LONG + s->mem_index);
664e0f19
FB
3590 } else {
3591 rm = (modrm & 7) | REX_B(s);
3592 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3593 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3594 }
3595 break;
3596 case 0x311: /* movsd ea, xmm */
3597 if (mod != 3) {
0af10c86 3598 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3599 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3600 } else {
3601 rm = (modrm & 7) | REX_B(s);
3602 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3603 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3604 }
3605 break;
3606 case 0x013: /* movlps */
3607 case 0x113: /* movlpd */
3608 if (mod != 3) {
0af10c86 3609 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3610 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3611 } else {
3612 goto illegal_op;
3613 }
3614 break;
3615 case 0x017: /* movhps */
3616 case 0x117: /* movhpd */
3617 if (mod != 3) {
0af10c86 3618 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3619 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3620 } else {
3621 goto illegal_op;
3622 }
3623 break;
3624 case 0x71: /* shift mm, im */
3625 case 0x72:
3626 case 0x73:
3627 case 0x171: /* shift xmm, im */
3628 case 0x172:
3629 case 0x173:
c045af25
AK
3630 if (b1 >= 2) {
3631 goto illegal_op;
3632 }
0af10c86 3633 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3634 if (is_xmm) {
3635 gen_op_movl_T0_im(val);
651ba608 3636 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
664e0f19 3637 gen_op_movl_T0_0();
651ba608 3638 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
664e0f19
FB
3639 op1_offset = offsetof(CPUX86State,xmm_t0);
3640 } else {
3641 gen_op_movl_T0_im(val);
651ba608 3642 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
664e0f19 3643 gen_op_movl_T0_0();
651ba608 3644 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
664e0f19
FB
3645 op1_offset = offsetof(CPUX86State,mmx_t0);
3646 }
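 /* The shift kind is selected by modrm.reg: /2 = shift right logical,
    /4 = shift right arithmetic (0x71/0x72 only), /6 = shift left, and
    /3 and /7 are the 66-prefixed psrldq/pslldq forms of 0x73.
    sse_op_table2 holds one row of 8 entries per opcode 0x71..0x73. */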
d3eb5eae
BS
3647 sse_fn_epp = sse_op_table2[((b - 1) & 3) * 8 +
3648 (((modrm >> 3)) & 7)][b1];
3649 if (!sse_fn_epp) {
664e0f19 3650 goto illegal_op;
c4baa050 3651 }
664e0f19
FB
3652 if (is_xmm) {
3653 rm = (modrm & 7) | REX_B(s);
3654 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3655 } else {
3656 rm = (modrm & 7);
3657 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3658 }
5af45186
FB
3659 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3660 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
d3eb5eae 3661 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3662 break;
3663 case 0x050: /* movmskps */
664e0f19 3664 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3665 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3666 offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3667 gen_helper_movmskps(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3668 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
57fec1fe 3669 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19
FB
3670 break;
3671 case 0x150: /* movmskpd */
664e0f19 3672 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3673 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3674 offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3675 gen_helper_movmskpd(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3676 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
57fec1fe 3677 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19
FB
3678 break;
3679 case 0x02a: /* cvtpi2ps */
3680 case 0x12a: /* cvtpi2pd */
d3eb5eae 3681 gen_helper_enter_mmx(cpu_env);
664e0f19 3682 if (mod != 3) {
0af10c86 3683 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 3684 op2_offset = offsetof(CPUX86State,mmx_t0);
8686c490 3685 gen_ldq_env_A0(s->mem_index, op2_offset);
664e0f19
FB
3686 } else {
3687 rm = (modrm & 7);
3688 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3689 }
3690 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
5af45186
FB
3691 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3692 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
664e0f19
FB
3693 switch(b >> 8) {
3694 case 0x0:
d3eb5eae 3695 gen_helper_cvtpi2ps(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3696 break;
3697 default:
3698 case 0x1:
d3eb5eae 3699 gen_helper_cvtpi2pd(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3700 break;
3701 }
3702 break;
3703 case 0x22a: /* cvtsi2ss */
3704 case 0x32a: /* cvtsi2sd */
3705 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
0af10c86 3706 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
664e0f19 3707 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
5af45186 3708 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
28e10711 3709 if (ot == OT_LONG) {
d3eb5eae 3710 SSEFunc_0_epi sse_fn_epi = sse_op_table3ai[(b >> 8) & 1];
28e10711 3711 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 3712 sse_fn_epi(cpu_env, cpu_ptr0, cpu_tmp2_i32);
28e10711 3713 } else {
11f8cdbc 3714#ifdef TARGET_X86_64
d3eb5eae
BS
3715 SSEFunc_0_epl sse_fn_epl = sse_op_table3aq[(b >> 8) & 1];
3716 sse_fn_epl(cpu_env, cpu_ptr0, cpu_T[0]);
11f8cdbc
SW
3717#else
3718 goto illegal_op;
3719#endif
28e10711 3720 }
664e0f19
FB
3721 break;
3722 case 0x02c: /* cvttps2pi */
3723 case 0x12c: /* cvttpd2pi */
3724 case 0x02d: /* cvtps2pi */
3725 case 0x12d: /* cvtpd2pi */
d3eb5eae 3726 gen_helper_enter_mmx(cpu_env);
664e0f19 3727 if (mod != 3) {
0af10c86 3728 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 3729 op2_offset = offsetof(CPUX86State,xmm_t0);
8686c490 3730 gen_ldo_env_A0(s->mem_index, op2_offset);
664e0f19
FB
3731 } else {
3732 rm = (modrm & 7) | REX_B(s);
3733 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3734 }
3735 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
5af45186
FB
3736 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3737 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
664e0f19
FB
3738 switch(b) {
3739 case 0x02c:
d3eb5eae 3740 gen_helper_cvttps2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3741 break;
3742 case 0x12c:
d3eb5eae 3743 gen_helper_cvttpd2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3744 break;
3745 case 0x02d:
d3eb5eae 3746 gen_helper_cvtps2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3747 break;
3748 case 0x12d:
d3eb5eae 3749 gen_helper_cvtpd2pi(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
3750 break;
3751 }
3752 break;
3753 case 0x22c: /* cvttss2si */
3754 case 0x32c: /* cvttsd2si */
3755 case 0x22d: /* cvtss2si */
3756 case 0x32d: /* cvtsd2si */
3757 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
31313213 3758 if (mod != 3) {
0af10c86 3759 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
31313213 3760 if ((b >> 8) & 1) {
8686c490 3761 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
31313213 3762 } else {
57fec1fe 3763 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 3764 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
31313213
FB
3765 }
3766 op2_offset = offsetof(CPUX86State,xmm_t0);
3767 } else {
3768 rm = (modrm & 7) | REX_B(s);
3769 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3770 }
5af45186
FB
3771 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3772 if (ot == OT_LONG) {
d3eb5eae 3773 SSEFunc_i_ep sse_fn_i_ep =
bedc2ac1 3774 sse_op_table3bi[((b >> 7) & 2) | (b & 1)];
d3eb5eae 3775 sse_fn_i_ep(cpu_tmp2_i32, cpu_env, cpu_ptr0);
b6abf97d 3776 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5af45186 3777 } else {
11f8cdbc 3778#ifdef TARGET_X86_64
d3eb5eae 3779 SSEFunc_l_ep sse_fn_l_ep =
bedc2ac1 3780 sse_op_table3bq[((b >> 7) & 2) | (b & 1)];
d3eb5eae 3781 sse_fn_l_ep(cpu_T[0], cpu_env, cpu_ptr0);
11f8cdbc
SW
3782#else
3783 goto illegal_op;
3784#endif
5af45186 3785 }
57fec1fe 3786 gen_op_mov_reg_T0(ot, reg);
664e0f19
FB
3787 break;
3788 case 0xc4: /* pinsrw */
5fafdf24 3789 case 0x1c4:
d1e42c5c 3790 s->rip_offset = 1;
0af10c86
BS
3791 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
3792 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3793 if (b1) {
3794 val &= 7;
5af45186
FB
3795 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3796 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
664e0f19
FB
3797 } else {
3798 val &= 3;
5af45186
FB
3799 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3800 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
664e0f19
FB
3801 }
3802 break;
3803 case 0xc5: /* pextrw */
5fafdf24 3804 case 0x1c5:
664e0f19
FB
3805 if (mod != 3)
3806 goto illegal_op;
6dc2d0da 3807 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
0af10c86 3808 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
3809 if (b1) {
3810 val &= 7;
3811 rm = (modrm & 7) | REX_B(s);
5af45186
FB
3812 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3813 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
664e0f19
FB
3814 } else {
3815 val &= 3;
3816 rm = (modrm & 7);
5af45186
FB
3817 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3818 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
664e0f19
FB
3819 }
3820 reg = ((modrm >> 3) & 7) | rex_r;
6dc2d0da 3821 gen_op_mov_reg_T0(ot, reg);
664e0f19
FB
3822 break;
3823 case 0x1d6: /* movq ea, xmm */
3824 if (mod != 3) {
0af10c86 3825 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8686c490 3826 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
664e0f19
FB
3827 } else {
3828 rm = (modrm & 7) | REX_B(s);
3829 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3830 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3831 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3832 }
3833 break;
3834 case 0x2d6: /* movq2dq */
d3eb5eae 3835 gen_helper_enter_mmx(cpu_env);
480c1cdb
FB
3836 rm = (modrm & 7);
3837 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3838 offsetof(CPUX86State,fpregs[rm].mmx));
3839 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
664e0f19
FB
3840 break;
3841 case 0x3d6: /* movdq2q */
d3eb5eae 3842 gen_helper_enter_mmx(cpu_env);
480c1cdb
FB
3843 rm = (modrm & 7) | REX_B(s);
3844 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3845 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
664e0f19
FB
3846 break;
3847 case 0xd7: /* pmovmskb */
3848 case 0x1d7:
3849 if (mod != 3)
3850 goto illegal_op;
3851 if (b1) {
3852 rm = (modrm & 7) | REX_B(s);
5af45186 3853 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
d3eb5eae 3854 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_env, cpu_ptr0);
664e0f19
FB
3855 } else {
3856 rm = (modrm & 7);
5af45186 3857 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
d3eb5eae 3858 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_env, cpu_ptr0);
664e0f19 3859 }
b6abf97d 3860 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
664e0f19 3861 reg = ((modrm >> 3) & 7) | rex_r;
57fec1fe 3862 gen_op_mov_reg_T0(OT_LONG, reg);
664e0f19 3863 break;
111994ee 3864
4242b1bd 3865 case 0x138:
000cacf6 3866 case 0x038:
4242b1bd 3867 b = modrm;
111994ee
RH
3868 if ((b & 0xf0) == 0xf0) {
3869 goto do_0f_38_fx;
3870 }
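 /* Opcodes 0f 38 f0..ff (movbe, crc32 and the VEX-encoded BMI/ADX
    insns) are decoded separately at do_0f_38_fx below. */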
0af10c86 3871 modrm = cpu_ldub_code(env, s->pc++);
4242b1bd
AZ
3872 rm = modrm & 7;
3873 reg = ((modrm >> 3) & 7) | rex_r;
3874 mod = (modrm >> 6) & 3;
c045af25
AK
3875 if (b1 >= 2) {
3876 goto illegal_op;
3877 }
4242b1bd 3878
d3eb5eae
BS
3879 sse_fn_epp = sse_op_table6[b].op[b1];
3880 if (!sse_fn_epp) {
4242b1bd 3881 goto illegal_op;
c4baa050 3882 }
222a3336
AZ
3883 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3884 goto illegal_op;
4242b1bd
AZ
3885
3886 if (b1) {
3887 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3888 if (mod == 3) {
3889 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3890 } else {
3891 op2_offset = offsetof(CPUX86State,xmm_t0);
0af10c86 3892 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
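 /* pmovsx/pmovzx widen their source elements, so only the low
    8, 4 or 2 bytes of a memory operand are accessed; load just
    that much instead of a full 16 bytes. */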
222a3336
AZ
3893 switch (b) {
3894 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3895 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3896 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3897 gen_ldq_env_A0(s->mem_index, op2_offset +
3898 offsetof(XMMReg, XMM_Q(0)));
3899 break;
3900 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3901 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
a7812ae4 3902 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 3903 (s->mem_index >> 2) - 1);
a7812ae4 3904 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
222a3336
AZ
3905 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3906 offsetof(XMMReg, XMM_L(0)));
3907 break;
3908 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3909 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3910 (s->mem_index >> 2) - 1);
3911 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3912 offsetof(XMMReg, XMM_W(0)));
3913 break;
 3914 case 0x2a: /* movntdqa */
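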
3915 gen_ldo_env_A0(s->mem_index, op1_offset);
3916 return;
3917 default:
3918 gen_ldo_env_A0(s->mem_index, op2_offset);
3919 }
4242b1bd
AZ
3920 }
3921 } else {
3922 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3923 if (mod == 3) {
3924 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3925 } else {
3926 op2_offset = offsetof(CPUX86State,mmx_t0);
0af10c86 3927 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
3928 gen_ldq_env_A0(s->mem_index, op2_offset);
3929 }
3930 }
d3eb5eae 3931 if (sse_fn_epp == SSE_SPECIAL) {
222a3336 3932 goto illegal_op;
c4baa050 3933 }
222a3336 3934
4242b1bd
AZ
3935 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3936 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 3937 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
222a3336 3938
3ca51d07
RH
3939 if (b == 0x17) {
3940 set_cc_op(s, CC_OP_EFLAGS);
3941 }
4242b1bd 3942 break;
111994ee
RH
3943
3944 case 0x238:
3945 case 0x338:
3946 do_0f_38_fx:
3947 /* Various integer extensions at 0f 38 f[0-f]. */
3948 b = modrm | (b1 << 8);
0af10c86 3949 modrm = cpu_ldub_code(env, s->pc++);
222a3336
AZ
3950 reg = ((modrm >> 3) & 7) | rex_r;
3951
111994ee
RH
3952 switch (b) {
3953 case 0x3f0: /* crc32 Gd,Eb */
3954 case 0x3f1: /* crc32 Gd,Ey */
3955 do_crc32:
3956 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42)) {
3957 goto illegal_op;
3958 }
3959 if ((b & 0xff) == 0xf0) {
3960 ot = OT_BYTE;
3961 } else if (s->dflag != 2) {
3962 ot = (s->prefix & PREFIX_DATA ? OT_WORD : OT_LONG);
3963 } else {
3964 ot = OT_QUAD;
3965 }
4242b1bd 3966
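 /* The destination register carries the running CRC-32C value; the
    helper folds in 8 << ot bits taken from the source operand. */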
111994ee
RH
3967 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3968 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3969 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
3970 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3971 cpu_T[0], tcg_const_i32(8 << ot));
222a3336 3972
111994ee
RH
3973 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3974 gen_op_mov_reg_T0(ot, reg);
3975 break;
222a3336 3976
111994ee
RH
3977 case 0x1f0: /* crc32 or movbe */
3978 case 0x1f1:
 3979 /* For these insns, the f3 prefix is supposed to have priority
 3980 over the 66 prefix, but that is not how b1 was computed
 3981 above. */
3982 if (s->prefix & PREFIX_REPNZ) {
3983 goto do_crc32;
3984 }
3985 /* FALLTHRU */
3986 case 0x0f0: /* movbe Gy,My */
3987 case 0x0f1: /* movbe My,Gy */
3988 if (!(s->cpuid_ext_features & CPUID_EXT_MOVBE)) {
3989 goto illegal_op;
3990 }
3991 if (s->dflag != 2) {
3992 ot = (s->prefix & PREFIX_DATA ? OT_WORD : OT_LONG);
3993 } else {
3994 ot = OT_QUAD;
3995 }
3996
3997 /* Load the data incoming to the bswap. Note that the TCG
3998 implementation of bswap requires the input be zero
3999 extended. In the case of the loads, we simply know that
4000 gen_op_ld_v via gen_ldst_modrm does that already. */
4001 if ((b & 1) == 0) {
4002 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4003 } else {
4004 switch (ot) {
4005 case OT_WORD:
4006 tcg_gen_ext16u_tl(cpu_T[0], cpu_regs[reg]);
4007 break;
4008 default:
4009 tcg_gen_ext32u_tl(cpu_T[0], cpu_regs[reg]);
4010 break;
4011 case OT_QUAD:
4012 tcg_gen_mov_tl(cpu_T[0], cpu_regs[reg]);
4013 break;
4014 }
4015 }
4016
4017 switch (ot) {
4018 case OT_WORD:
4019 tcg_gen_bswap16_tl(cpu_T[0], cpu_T[0]);
4020 break;
4021 default:
4022 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
4023 break;
4024#ifdef TARGET_X86_64
4025 case OT_QUAD:
4026 tcg_gen_bswap64_tl(cpu_T[0], cpu_T[0]);
4027 break;
4028#endif
4029 }
4030
4031 if ((b & 1) == 0) {
4032 gen_op_mov_reg_T0(ot, reg);
4033 } else {
4034 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
4035 }
4036 break;
4037
7073fbad
RH
4038 case 0x0f2: /* andn Gy, By, Ey */
4039 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4040 || !(s->prefix & PREFIX_VEX)
4041 || s->vex_l != 0) {
4042 goto illegal_op;
4043 }
4044 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4045 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4046 tcg_gen_andc_tl(cpu_T[0], cpu_regs[s->vex_v], cpu_T[0]);
4047 gen_op_mov_reg_T0(ot, reg);
4048 gen_op_update1_cc();
4049 set_cc_op(s, CC_OP_LOGICB + ot);
4050 break;
4051
c7ab7565
RH
4052 case 0x0f7: /* bextr Gy, Ey, By */
4053 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4054 || !(s->prefix & PREFIX_VEX)
4055 || s->vex_l != 0) {
4056 goto illegal_op;
4057 }
4058 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4059 {
4060 TCGv bound, zero;
4061
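 /* The control word in By packs START in bits 7:0 and LEN in bits
    15:8; e.g. By = 0x0408 extracts 4 bits starting at bit 8,
    i.e. (Ey >> 8) & 0xf. */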
4062 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4063 /* Extract START, and shift the operand.
4064 Shifts larger than operand size get zeros. */
4065 tcg_gen_ext8u_tl(cpu_A0, cpu_regs[s->vex_v]);
4066 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_A0);
4067
4068 bound = tcg_const_tl(ot == OT_QUAD ? 63 : 31);
4069 zero = tcg_const_tl(0);
4070 tcg_gen_movcond_tl(TCG_COND_LEU, cpu_T[0], cpu_A0, bound,
4071 cpu_T[0], zero);
4072 tcg_temp_free(zero);
4073
4074 /* Extract the LEN into a mask. Lengths larger than
4075 operand size get all ones. */
4076 tcg_gen_shri_tl(cpu_A0, cpu_regs[s->vex_v], 8);
4077 tcg_gen_ext8u_tl(cpu_A0, cpu_A0);
4078 tcg_gen_movcond_tl(TCG_COND_LEU, cpu_A0, cpu_A0, bound,
4079 cpu_A0, bound);
4080 tcg_temp_free(bound);
4081 tcg_gen_movi_tl(cpu_T[1], 1);
4082 tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_A0);
4083 tcg_gen_subi_tl(cpu_T[1], cpu_T[1], 1);
4084 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4085
4086 gen_op_mov_reg_T0(ot, reg);
4087 gen_op_update1_cc();
4088 set_cc_op(s, CC_OP_LOGICB + ot);
4089 }
4090 break;
4091
02ea1e6b
RH
4092 case 0x0f5: /* bzhi Gy, Ey, By */
4093 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4094 || !(s->prefix & PREFIX_VEX)
4095 || s->vex_l != 0) {
4096 goto illegal_op;
4097 }
4098 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4099 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4100 tcg_gen_ext8u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4101 {
4102 TCGv bound = tcg_const_tl(ot == OT_QUAD ? 63 : 31);
4103 /* Note that since we're using BMILG (in order to get O
4104 cleared) we need to store the inverse into C. */
4105 tcg_gen_setcond_tl(TCG_COND_LT, cpu_cc_src,
4106 cpu_T[1], bound);
4107 tcg_gen_movcond_tl(TCG_COND_GT, cpu_T[1], cpu_T[1],
4108 bound, bound, cpu_T[1]);
4109 tcg_temp_free(bound);
4110 }
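 /* Keep only bits [index-1:0] of Ey: mask with ~(-1 << index). */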
4111 tcg_gen_movi_tl(cpu_A0, -1);
4112 tcg_gen_shl_tl(cpu_A0, cpu_A0, cpu_T[1]);
4113 tcg_gen_andc_tl(cpu_T[0], cpu_T[0], cpu_A0);
4114 gen_op_mov_reg_T0(ot, reg);
4115 gen_op_update1_cc();
4116 set_cc_op(s, CC_OP_BMILGB + ot);
4117 break;
4118
5f1f4b17
RH
4119 case 0x3f6: /* mulx By, Gy, rdx, Ey */
4120 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4121 || !(s->prefix & PREFIX_VEX)
4122 || s->vex_l != 0) {
4123 goto illegal_op;
4124 }
4125 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4126 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
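 /* MULX: unsigned EDX/RDX * Ey; the low half of the product goes to
    By (vex.vvvv) and the high half to Gy, with EFLAGS untouched. */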
4127 switch (ot) {
5f1f4b17 4128 default:
a4bcea3d
RH
4129 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4130 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EDX]);
4131 tcg_gen_mulu2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
4132 cpu_tmp2_i32, cpu_tmp3_i32);
4133 tcg_gen_extu_i32_tl(cpu_regs[s->vex_v], cpu_tmp2_i32);
4134 tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp3_i32);
5f1f4b17
RH
4135 break;
4136#ifdef TARGET_X86_64
4137 case OT_QUAD:
a4bcea3d
RH
4138 tcg_gen_mulu2_i64(cpu_regs[s->vex_v], cpu_regs[reg],
4139 cpu_T[0], cpu_regs[R_EDX]);
5f1f4b17
RH
4140 break;
4141#endif
4142 }
4143 break;
4144
0592f74a
RH
4145 case 0x3f5: /* pdep Gy, By, Ey */
4146 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4147 || !(s->prefix & PREFIX_VEX)
4148 || s->vex_l != 0) {
4149 goto illegal_op;
4150 }
4151 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4152 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4153 /* Note that by zero-extending the mask operand, we
4154 automatically handle zero-extending the result. */
4155 if (s->dflag == 2) {
4156 tcg_gen_mov_tl(cpu_T[1], cpu_regs[s->vex_v]);
4157 } else {
4158 tcg_gen_ext32u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4159 }
4160 gen_helper_pdep(cpu_regs[reg], cpu_T[0], cpu_T[1]);
4161 break;
4162
4163 case 0x2f5: /* pext Gy, By, Ey */
4164 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4165 || !(s->prefix & PREFIX_VEX)
4166 || s->vex_l != 0) {
4167 goto illegal_op;
4168 }
4169 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4170 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4171 /* Note that by zero-extending the mask operand, we
4172 automatically handle zero-extending the result. */
4173 if (s->dflag == 2) {
4174 tcg_gen_mov_tl(cpu_T[1], cpu_regs[s->vex_v]);
4175 } else {
4176 tcg_gen_ext32u_tl(cpu_T[1], cpu_regs[s->vex_v]);
4177 }
4178 gen_helper_pext(cpu_regs[reg], cpu_T[0], cpu_T[1]);
4179 break;
4180
cd7f97ca
RH
4181 case 0x1f6: /* adcx Gy, Ey */
4182 case 0x2f6: /* adox Gy, Ey */
4183 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_ADX)) {
4184 goto illegal_op;
4185 } else {
76f13133 4186 TCGv carry_in, carry_out, zero;
cd7f97ca
RH
4187 int end_op;
4188
4189 ot = (s->dflag == 2 ? OT_QUAD : OT_LONG);
4190 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4191
4192 /* Re-use the carry-out from a previous round. */
4193 TCGV_UNUSED(carry_in);
4194 carry_out = (b == 0x1f6 ? cpu_cc_dst : cpu_cc_src2);
4195 switch (s->cc_op) {
4196 case CC_OP_ADCX:
4197 if (b == 0x1f6) {
4198 carry_in = cpu_cc_dst;
4199 end_op = CC_OP_ADCX;
4200 } else {
4201 end_op = CC_OP_ADCOX;
4202 }
4203 break;
4204 case CC_OP_ADOX:
4205 if (b == 0x1f6) {
4206 end_op = CC_OP_ADCOX;
4207 } else {
4208 carry_in = cpu_cc_src2;
4209 end_op = CC_OP_ADOX;
4210 }
4211 break;
4212 case CC_OP_ADCOX:
4213 end_op = CC_OP_ADCOX;
4214 carry_in = carry_out;
4215 break;
4216 default:
c53de1a2 4217 end_op = (b == 0x1f6 ? CC_OP_ADCX : CC_OP_ADOX);
cd7f97ca
RH
4218 break;
4219 }
4220 /* If we can't reuse carry-out, get it out of EFLAGS. */
4221 if (TCGV_IS_UNUSED(carry_in)) {
4222 if (s->cc_op != CC_OP_ADCX && s->cc_op != CC_OP_ADOX) {
4223 gen_compute_eflags(s);
4224 }
4225 carry_in = cpu_tmp0;
4226 tcg_gen_shri_tl(carry_in, cpu_cc_src,
4227 ctz32(b == 0x1f6 ? CC_C : CC_O));
4228 tcg_gen_andi_tl(carry_in, carry_in, 1);
4229 }
4230
4231 switch (ot) {
4232#ifdef TARGET_X86_64
4233 case OT_LONG:
4234 /* If we know TL is 64-bit, and we want a 32-bit
4235 result, just do everything in 64-bit arithmetic. */
4236 tcg_gen_ext32u_i64(cpu_regs[reg], cpu_regs[reg]);
4237 tcg_gen_ext32u_i64(cpu_T[0], cpu_T[0]);
4238 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_regs[reg]);
4239 tcg_gen_add_i64(cpu_T[0], cpu_T[0], carry_in);
4240 tcg_gen_ext32u_i64(cpu_regs[reg], cpu_T[0]);
4241 tcg_gen_shri_i64(carry_out, cpu_T[0], 32);
4242 break;
4243#endif
4244 default:
4245 /* Otherwise compute the carry-out in two steps. */
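 /* First fold carry_in into T0, then add T0 into the destination;
    each add2 accumulates its carry into carry_out, which therefore
    ends up as 0 or 1. */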
76f13133
RH
4246 zero = tcg_const_tl(0);
4247 tcg_gen_add2_tl(cpu_T[0], carry_out,
4248 cpu_T[0], zero,
4249 carry_in, zero);
4250 tcg_gen_add2_tl(cpu_regs[reg], carry_out,
4251 cpu_regs[reg], carry_out,
4252 cpu_T[0], zero);
4253 tcg_temp_free(zero);
cd7f97ca
RH
4254 break;
4255 }
cd7f97ca
RH
4256 set_cc_op(s, end_op);
4257 }
4258 break;
4259
4a554890
RH
4260 case 0x1f7: /* shlx Gy, Ey, By */
4261 case 0x2f7: /* sarx Gy, Ey, By */
4262 case 0x3f7: /* shrx Gy, Ey, By */
4263 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4264 || !(s->prefix & PREFIX_VEX)
4265 || s->vex_l != 0) {
4266 goto illegal_op;
4267 }
4268 ot = (s->dflag == 2 ? OT_QUAD : OT_LONG);
4269 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
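 /* The count from By is masked to the operand size, as for ordinary
    shifts; for 32-bit sarx/shrx the value is sign/zero extended first
    so that a target_long-wide shift yields the 32-bit result. */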
4270 if (ot == OT_QUAD) {
4271 tcg_gen_andi_tl(cpu_T[1], cpu_regs[s->vex_v], 63);
4272 } else {
4273 tcg_gen_andi_tl(cpu_T[1], cpu_regs[s->vex_v], 31);
4274 }
4275 if (b == 0x1f7) {
4276 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4277 } else if (b == 0x2f7) {
4278 if (ot != OT_QUAD) {
4279 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4280 }
4281 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4282 } else {
4283 if (ot != OT_QUAD) {
4284 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4285 }
4286 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4287 }
4288 gen_op_mov_reg_T0(ot, reg);
4289 break;
4290
bc4b43dc
RH
4291 case 0x0f3:
4292 case 0x1f3:
4293 case 0x2f3:
4294 case 0x3f3: /* Group 17 */
4295 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)
4296 || !(s->prefix & PREFIX_VEX)
4297 || s->vex_l != 0) {
4298 goto illegal_op;
4299 }
4300 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4301 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4302
4303 switch (reg & 7) {
4304 case 1: /* blsr By,Ey */
4305 tcg_gen_neg_tl(cpu_T[1], cpu_T[0]);
4306 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4307 gen_op_mov_reg_T0(ot, s->vex_v);
4308 gen_op_update2_cc();
4309 set_cc_op(s, CC_OP_BMILGB + ot);
4310 break;
4311
4312 case 2: /* blsmsk By,Ey */
4313 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4314 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], 1);
4315 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_cc_src);
4316 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4317 set_cc_op(s, CC_OP_BMILGB + ot);
4318 break;
4319
4320 case 3: /* blsi By, Ey */
4321 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4322 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], 1);
4323 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_cc_src);
4324 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4325 set_cc_op(s, CC_OP_BMILGB + ot);
4326 break;
4327
4328 default:
4329 goto illegal_op;
4330 }
4331 break;
4332
111994ee
RH
4333 default:
4334 goto illegal_op;
4335 }
222a3336 4336 break;
111994ee 4337
222a3336
AZ
4338 case 0x03a:
4339 case 0x13a:
4242b1bd 4340 b = modrm;
0af10c86 4341 modrm = cpu_ldub_code(env, s->pc++);
4242b1bd
AZ
4342 rm = modrm & 7;
4343 reg = ((modrm >> 3) & 7) | rex_r;
4344 mod = (modrm >> 6) & 3;
c045af25
AK
4345 if (b1 >= 2) {
4346 goto illegal_op;
4347 }
4242b1bd 4348
d3eb5eae
BS
4349 sse_fn_eppi = sse_op_table7[b].op[b1];
4350 if (!sse_fn_eppi) {
4242b1bd 4351 goto illegal_op;
c4baa050 4352 }
222a3336
AZ
4353 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4354 goto illegal_op;
4355
d3eb5eae 4356 if (sse_fn_eppi == SSE_SPECIAL) {
222a3336
AZ
4357 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4358 rm = (modrm & 7) | REX_B(s);
4359 if (mod != 3)
0af10c86 4360 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
222a3336 4361 reg = ((modrm >> 3) & 7) | rex_r;
0af10c86 4362 val = cpu_ldub_code(env, s->pc++);
222a3336
AZ
4363 switch (b) {
4364 case 0x14: /* pextrb */
4365 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4366 xmm_regs[reg].XMM_B(val & 15)));
4367 if (mod == 3)
4368 gen_op_mov_reg_T0(ot, rm);
4369 else
4370 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4371 (s->mem_index >> 2) - 1);
4372 break;
4373 case 0x15: /* pextrw */
4374 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4375 xmm_regs[reg].XMM_W(val & 7)));
4376 if (mod == 3)
4377 gen_op_mov_reg_T0(ot, rm);
4378 else
4379 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4380 (s->mem_index >> 2) - 1);
4381 break;
4382 case 0x16:
4383 if (ot == OT_LONG) { /* pextrd */
4384 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4385 offsetof(CPUX86State,
4386 xmm_regs[reg].XMM_L(val & 3)));
a7812ae4 4387 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
222a3336 4388 if (mod == 3)
a7812ae4 4389 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
222a3336 4390 else
a7812ae4 4391 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
222a3336
AZ
4392 (s->mem_index >> 2) - 1);
4393 } else { /* pextrq */
a7812ae4 4394#ifdef TARGET_X86_64
222a3336
AZ
4395 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4396 offsetof(CPUX86State,
4397 xmm_regs[reg].XMM_Q(val & 1)));
4398 if (mod == 3)
4399 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4400 else
4401 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4402 (s->mem_index >> 2) - 1);
a7812ae4
PB
4403#else
4404 goto illegal_op;
4405#endif
222a3336
AZ
4406 }
4407 break;
4408 case 0x17: /* extractps */
4409 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4410 xmm_regs[reg].XMM_L(val & 3)));
4411 if (mod == 3)
4412 gen_op_mov_reg_T0(ot, rm);
4413 else
4414 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4415 (s->mem_index >> 2) - 1);
4416 break;
4417 case 0x20: /* pinsrb */
4418 if (mod == 3)
4419 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4420 else
34c6addd 4421 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
222a3336 4422 (s->mem_index >> 2) - 1);
34c6addd 4423 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
222a3336
AZ
4424 xmm_regs[reg].XMM_B(val & 15)));
4425 break;
4426 case 0x21: /* insertps */
a7812ae4 4427 if (mod == 3) {
222a3336
AZ
4428 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4429 offsetof(CPUX86State,xmm_regs[rm]
4430 .XMM_L((val >> 6) & 3)));
a7812ae4
PB
4431 } else {
4432 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 4433 (s->mem_index >> 2) - 1);
a7812ae4
PB
4434 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4435 }
222a3336
AZ
4436 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4437 offsetof(CPUX86State,xmm_regs[reg]
4438 .XMM_L((val >> 4) & 3)));
4439 if ((val >> 0) & 1)
4440 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4441 cpu_env, offsetof(CPUX86State,
4442 xmm_regs[reg].XMM_L(0)));
4443 if ((val >> 1) & 1)
4444 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4445 cpu_env, offsetof(CPUX86State,
4446 xmm_regs[reg].XMM_L(1)));
4447 if ((val >> 2) & 1)
4448 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4449 cpu_env, offsetof(CPUX86State,
4450 xmm_regs[reg].XMM_L(2)));
4451 if ((val >> 3) & 1)
4452 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4453 cpu_env, offsetof(CPUX86State,
4454 xmm_regs[reg].XMM_L(3)));
4455 break;
4456 case 0x22:
4457 if (ot == OT_LONG) { /* pinsrd */
4458 if (mod == 3)
a7812ae4 4459 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
222a3336 4460 else
a7812ae4 4461 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
222a3336 4462 (s->mem_index >> 2) - 1);
a7812ae4 4463 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
222a3336
AZ
4464 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4465 offsetof(CPUX86State,
4466 xmm_regs[reg].XMM_L(val & 3)));
4467 } else { /* pinsrq */
a7812ae4 4468#ifdef TARGET_X86_64
222a3336
AZ
4469 if (mod == 3)
4470 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4471 else
4472 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4473 (s->mem_index >> 2) - 1);
4474 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4475 offsetof(CPUX86State,
4476 xmm_regs[reg].XMM_Q(val & 1)));
a7812ae4
PB
4477#else
4478 goto illegal_op;
4479#endif
222a3336
AZ
4480 }
4481 break;
4482 }
4483 return;
4484 }
4242b1bd
AZ
4485
4486 if (b1) {
4487 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4488 if (mod == 3) {
4489 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4490 } else {
4491 op2_offset = offsetof(CPUX86State,xmm_t0);
0af10c86 4492 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
4493 gen_ldo_env_A0(s->mem_index, op2_offset);
4494 }
4495 } else {
4496 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4497 if (mod == 3) {
4498 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4499 } else {
4500 op2_offset = offsetof(CPUX86State,mmx_t0);
0af10c86 4501 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4242b1bd
AZ
4502 gen_ldq_env_A0(s->mem_index, op2_offset);
4503 }
4504 }
0af10c86 4505 val = cpu_ldub_code(env, s->pc++);
4242b1bd 4506
222a3336 4507 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3ca51d07 4508 set_cc_op(s, CC_OP_EFLAGS);
222a3336
AZ
4509
4510 if (s->dflag == 2)
4511 /* The helper must use entire 64-bit gp registers */
4512 val |= 1 << 8;
4513 }
4514
4242b1bd
AZ
4515 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4516 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4517 sse_fn_eppi(cpu_env, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4242b1bd 4518 break;
e2c3c2c5
RH
4519
4520 case 0x33a:
4521 /* Various integer extensions at 0f 3a f[0-f]. */
4522 b = modrm | (b1 << 8);
4523 modrm = cpu_ldub_code(env, s->pc++);
4524 reg = ((modrm >> 3) & 7) | rex_r;
4525
4526 switch (b) {
4527 case 0x3f0: /* rorx Gy,Ey, Ib */
4528 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI2)
4529 || !(s->prefix & PREFIX_VEX)
4530 || s->vex_l != 0) {
4531 goto illegal_op;
4532 }
4533 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
4534 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
4535 b = cpu_ldub_code(env, s->pc++);
4536 if (ot == OT_QUAD) {
4537 tcg_gen_rotri_tl(cpu_T[0], cpu_T[0], b & 63);
4538 } else {
4539 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4540 tcg_gen_rotri_i32(cpu_tmp2_i32, cpu_tmp2_i32, b & 31);
4541 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4542 }
4543 gen_op_mov_reg_T0(ot, reg);
4544 break;
4545
4546 default:
4547 goto illegal_op;
4548 }
4549 break;
4550
664e0f19
FB
4551 default:
4552 goto illegal_op;
4553 }
4554 } else {
4555 /* generic MMX or SSE operation */
d1e42c5c 4556 switch(b) {
d1e42c5c
FB
4557 case 0x70: /* pshufx insn */
4558 case 0xc6: /* pshufx insn */
4559 case 0xc2: /* compare insns */
4560 s->rip_offset = 1;
4561 break;
4562 default:
4563 break;
664e0f19
FB
4564 }
4565 if (is_xmm) {
4566 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4567 if (mod != 3) {
0af10c86 4568 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 4569 op2_offset = offsetof(CPUX86State,xmm_t0);
480c1cdb 4570 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
664e0f19
FB
4571 b == 0xc2)) {
 4572 /* specific case for SSE scalar (ss/sd) insns: only 32 or 64 bits are read from memory */
4573 if (b1 == 2) {
4574 /* 32 bit access */
57fec1fe 4575 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
651ba608 4576 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
664e0f19
FB
4577 } else {
4578 /* 64 bit access */
8686c490 4579 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
664e0f19
FB
4580 }
4581 } else {
8686c490 4582 gen_ldo_env_A0(s->mem_index, op2_offset);
664e0f19
FB
4583 }
4584 } else {
4585 rm = (modrm & 7) | REX_B(s);
4586 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4587 }
4588 } else {
4589 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4590 if (mod != 3) {
0af10c86 4591 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 4592 op2_offset = offsetof(CPUX86State,mmx_t0);
8686c490 4593 gen_ldq_env_A0(s->mem_index, op2_offset);
664e0f19
FB
4594 } else {
4595 rm = (modrm & 7);
4596 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4597 }
4598 }
4599 switch(b) {
a35f3ec7 4600 case 0x0f: /* 3DNow! data insns */
e771edab
AJ
4601 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4602 goto illegal_op;
0af10c86 4603 val = cpu_ldub_code(env, s->pc++);
d3eb5eae
BS
4604 sse_fn_epp = sse_op_table5[val];
4605 if (!sse_fn_epp) {
a35f3ec7 4606 goto illegal_op;
c4baa050 4607 }
5af45186
FB
4608 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4609 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4610 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
a35f3ec7 4611 break;
664e0f19
FB
4612 case 0x70: /* pshufx insn */
4613 case 0xc6: /* pshufx insn */
0af10c86 4614 val = cpu_ldub_code(env, s->pc++);
5af45186
FB
4615 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4616 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
c4baa050 4617 /* XXX: introduce a new table? */
d3eb5eae 4618 sse_fn_ppi = (SSEFunc_0_ppi)sse_fn_epp;
c4baa050 4619 sse_fn_ppi(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
664e0f19
FB
4620 break;
4621 case 0xc2:
4622 /* compare insns */
0af10c86 4623 val = cpu_ldub_code(env, s->pc++);
664e0f19
FB
4624 if (val >= 8)
4625 goto illegal_op;
d3eb5eae 4626 sse_fn_epp = sse_op_table4[val][b1];
c4baa050 4627
5af45186
FB
4628 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4629 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4630 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19 4631 break;
b8b6a50b
FB
4632 case 0xf7:
 4633 /* maskmov: the store address is the implicit DS:rDI, so A0 must be prepared here */
4634 if (mod != 3)
4635 goto illegal_op;
4636#ifdef TARGET_X86_64
4637 if (s->aflag == 2) {
4638 gen_op_movq_A0_reg(R_EDI);
4639 } else
4640#endif
4641 {
4642 gen_op_movl_A0_reg(R_EDI);
4643 if (s->aflag == 0)
4644 gen_op_andl_A0_ffff();
4645 }
4646 gen_add_A0_ds_seg(s);
4647
4648 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4649 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
c4baa050 4650 /* XXX: introduce a new table? */
d3eb5eae
BS
4651 sse_fn_eppt = (SSEFunc_0_eppt)sse_fn_epp;
4652 sse_fn_eppt(cpu_env, cpu_ptr0, cpu_ptr1, cpu_A0);
b8b6a50b 4653 break;
664e0f19 4654 default:
5af45186
FB
4655 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4656 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
d3eb5eae 4657 sse_fn_epp(cpu_env, cpu_ptr0, cpu_ptr1);
664e0f19
FB
4658 break;
4659 }
4660 if (b == 0x2e || b == 0x2f) {
3ca51d07 4661 set_cc_op(s, CC_OP_EFLAGS);
664e0f19
FB
4662 }
4663 }
4664}
4665
2c0262af
FB
4666/* convert one instruction. s->is_jmp is set if the translation must
4667 be stopped. Return the next pc value */
0af10c86
BS
4668static target_ulong disas_insn(CPUX86State *env, DisasContext *s,
4669 target_ulong pc_start)
2c0262af
FB
4670{
4671 int b, prefixes, aflag, dflag;
4672 int shift, ot;
4673 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
14ce26e7
FB
4674 target_ulong next_eip, tval;
4675 int rex_w, rex_r;
2c0262af 4676
fdefe51c 4677 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
70cff25e 4678 tcg_gen_debug_insn_start(pc_start);
fdefe51c 4679 }
2c0262af
FB
4680 s->pc = pc_start;
4681 prefixes = 0;
2c0262af 4682 s->override = -1;
14ce26e7
FB
4683 rex_w = -1;
4684 rex_r = 0;
4685#ifdef TARGET_X86_64
4686 s->rex_x = 0;
4687 s->rex_b = 0;
5fafdf24 4688 x86_64_hregs = 0;
14ce26e7
FB
4689#endif
4690 s->rip_offset = 0; /* for relative ip address */
701ed211
RH
4691 s->vex_l = 0;
4692 s->vex_v = 0;
2c0262af 4693 next_byte:
0af10c86 4694 b = cpu_ldub_code(env, s->pc);
2c0262af 4695 s->pc++;
4a6fd938
RH
4696 /* Collect prefixes. */
4697 switch (b) {
4698 case 0xf3:
4699 prefixes |= PREFIX_REPZ;
4700 goto next_byte;
4701 case 0xf2:
4702 prefixes |= PREFIX_REPNZ;
4703 goto next_byte;
4704 case 0xf0:
4705 prefixes |= PREFIX_LOCK;
4706 goto next_byte;
4707 case 0x2e:
4708 s->override = R_CS;
4709 goto next_byte;
4710 case 0x36:
4711 s->override = R_SS;
4712 goto next_byte;
4713 case 0x3e:
4714 s->override = R_DS;
4715 goto next_byte;
4716 case 0x26:
4717 s->override = R_ES;
4718 goto next_byte;
4719 case 0x64:
4720 s->override = R_FS;
4721 goto next_byte;
4722 case 0x65:
4723 s->override = R_GS;
4724 goto next_byte;
4725 case 0x66:
4726 prefixes |= PREFIX_DATA;
4727 goto next_byte;
4728 case 0x67:
4729 prefixes |= PREFIX_ADR;
4730 goto next_byte;
14ce26e7 4731#ifdef TARGET_X86_64
4a6fd938
RH
4732 case 0x40 ... 0x4f:
4733 if (CODE64(s)) {
14ce26e7
FB
4734 /* REX prefix */
4735 rex_w = (b >> 3) & 1;
4736 rex_r = (b & 0x4) << 1;
4737 s->rex_x = (b & 0x2) << 2;
4738 REX_B(s) = (b & 0x1) << 3;
4739 x86_64_hregs = 1; /* select uniform byte register addressing */
4740 goto next_byte;
4741 }
4a6fd938
RH
4742 break;
4743#endif
701ed211
RH
4744 case 0xc5: /* 2-byte VEX */
4745 case 0xc4: /* 3-byte VEX */
 4746 /* The 0xc4/0xc5 escapes act as VEX prefixes only outside of real,
 4747 vm86 and 16-bit modes; otherwise the instruction is LES or LDS. */
4748 if (s->code32 && !s->vm86) {
4749 static const int pp_prefix[4] = {
4750 0, PREFIX_DATA, PREFIX_REPZ, PREFIX_REPNZ
4751 };
4752 int vex3, vex2 = cpu_ldub_code(env, s->pc);
4753
4754 if (!CODE64(s) && (vex2 & 0xc0) != 0xc0) {
4755 /* 4.1.4.6: In 32-bit mode, bits [7:6] must be 11b,
4756 otherwise the instruction is LES or LDS. */
4757 break;
4758 }
4759 s->pc++;
4760
085d8134 4761 /* 4.1.1-4.1.3: No preceding lock, 66, f2, f3, or rex prefixes. */
701ed211
RH
4762 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ
4763 | PREFIX_LOCK | PREFIX_DATA)) {
4764 goto illegal_op;
4765 }
4766#ifdef TARGET_X86_64
4767 if (x86_64_hregs) {
4768 goto illegal_op;
4769 }
4770#endif
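 /* 2-byte VEX (c5): the second byte is R.vvvv.L.pp.  3-byte VEX (c4):
    byte 2 is R.X.B.mmmmm and byte 3 is W.vvvv.L.pp.  R, X, B and vvvv
    are all stored inverted in the encoding. */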
4771 rex_r = (~vex2 >> 4) & 8;
4772 if (b == 0xc5) {
4773 vex3 = vex2;
4774 b = cpu_ldub_code(env, s->pc++);
4775 } else {
4776#ifdef TARGET_X86_64
4777 s->rex_x = (~vex2 >> 3) & 8;
4778 s->rex_b = (~vex2 >> 2) & 8;
4779#endif
4780 vex3 = cpu_ldub_code(env, s->pc++);
4781 rex_w = (vex3 >> 7) & 1;
4782 switch (vex2 & 0x1f) {
4783 case 0x01: /* Implied 0f leading opcode bytes. */
4784 b = cpu_ldub_code(env, s->pc++) | 0x100;
4785 break;
4786 case 0x02: /* Implied 0f 38 leading opcode bytes. */
4787 b = 0x138;
4788 break;
4789 case 0x03: /* Implied 0f 3a leading opcode bytes. */
4790 b = 0x13a;
4791 break;
4792 default: /* Reserved for future use. */
4793 goto illegal_op;
4794 }
4795 }
4796 s->vex_v = (~vex3 >> 3) & 0xf;
4797 s->vex_l = (vex3 >> 2) & 1;
4798 prefixes |= pp_prefix[vex3 & 3] | PREFIX_VEX;
4799 }
4800 break;
4a6fd938
RH
4801 }
4802
4803 /* Post-process prefixes. */
4a6fd938 4804 if (CODE64(s)) {
dec3fc96
RH
4805 /* In 64-bit mode, the default data size is 32-bit. Select 64-bit
4806 data with rex_w, and 16-bit data with 0x66; rex_w takes precedence
4807 over 0x66 if both are present. */
4808 dflag = (rex_w > 0 ? 2 : prefixes & PREFIX_DATA ? 0 : 1);
4809 /* In 64-bit mode, 0x67 selects 32-bit addressing. */
4810 aflag = (prefixes & PREFIX_ADR ? 1 : 2);
4811 } else {
4812 /* In 16/32-bit mode, 0x66 selects the opposite data size. */
4813 dflag = s->code32;
4814 if (prefixes & PREFIX_DATA) {
4815 dflag ^= 1;
14ce26e7 4816 }
dec3fc96
RH
4817 /* In 16/32-bit mode, 0x67 selects the opposite addressing. */
4818 aflag = s->code32;
4819 if (prefixes & PREFIX_ADR) {
4820 aflag ^= 1;
14ce26e7 4821 }
2c0262af
FB
4822 }
4823
2c0262af
FB
4824 s->prefix = prefixes;
4825 s->aflag = aflag;
4826 s->dflag = dflag;
4827
4828 /* lock generation */
4829 if (prefixes & PREFIX_LOCK)
a7812ae4 4830 gen_helper_lock();
2c0262af
FB
4831
4832 /* now check op code */
4833 reswitch:
4834 switch(b) {
4835 case 0x0f:
4836 /**************************/
4837 /* extended op code */
0af10c86 4838 b = cpu_ldub_code(env, s->pc++) | 0x100;
2c0262af 4839 goto reswitch;
3b46e624 4840
2c0262af
FB
4841 /**************************/
4842 /* arith & logic */
4843 case 0x00 ... 0x05:
4844 case 0x08 ... 0x0d:
4845 case 0x10 ... 0x15:
4846 case 0x18 ... 0x1d:
4847 case 0x20 ... 0x25:
4848 case 0x28 ... 0x2d:
4849 case 0x30 ... 0x35:
4850 case 0x38 ... 0x3d:
4851 {
4852 int op, f, val;
4853 op = (b >> 3) & 7;
4854 f = (b >> 1) & 3;
4855
4856 if ((b & 1) == 0)
4857 ot = OT_BYTE;
4858 else
14ce26e7 4859 ot = dflag + OT_WORD;
3b46e624 4860
2c0262af
FB
4861 switch(f) {
4862 case 0: /* OP Ev, Gv */
0af10c86 4863 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 4864 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 4865 mod = (modrm >> 6) & 3;
14ce26e7 4866 rm = (modrm & 7) | REX_B(s);
2c0262af 4867 if (mod != 3) {
0af10c86 4868 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
4869 opreg = OR_TMP0;
4870 } else if (op == OP_XORL && rm == reg) {
4871 xor_zero:
4872 /* xor reg, reg optimisation */
436ff2d2 4873 set_cc_op(s, CC_OP_CLR);
2c0262af 4874 gen_op_movl_T0_0();
57fec1fe 4875 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
4876 break;
4877 } else {
4878 opreg = rm;
4879 }
57fec1fe 4880 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af
FB
4881 gen_op(s, op, ot, opreg);
4882 break;
4883 case 1: /* OP Gv, Ev */
0af10c86 4884 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4885 mod = (modrm >> 6) & 3;
14ce26e7
FB
4886 reg = ((modrm >> 3) & 7) | rex_r;
4887 rm = (modrm & 7) | REX_B(s);
2c0262af 4888 if (mod != 3) {
0af10c86 4889 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 4890 gen_op_ld_T1_A0(ot + s->mem_index);
2c0262af
FB
4891 } else if (op == OP_XORL && rm == reg) {
4892 goto xor_zero;
4893 } else {
57fec1fe 4894 gen_op_mov_TN_reg(ot, 1, rm);
2c0262af
FB
4895 }
4896 gen_op(s, op, ot, reg);
4897 break;
4898 case 2: /* OP A, Iv */
0af10c86 4899 val = insn_get(env, s, ot);
2c0262af
FB
4900 gen_op_movl_T1_im(val);
4901 gen_op(s, op, ot, OR_EAX);
4902 break;
4903 }
4904 }
4905 break;
4906
ec9d6075
FB
4907 case 0x82:
4908 if (CODE64(s))
4909 goto illegal_op;
2c0262af
FB
4910 case 0x80: /* GRP1 */
4911 case 0x81:
4912 case 0x83:
4913 {
4914 int val;
4915
4916 if ((b & 1) == 0)
4917 ot = OT_BYTE;
4918 else
14ce26e7 4919 ot = dflag + OT_WORD;
3b46e624 4920
0af10c86 4921 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4922 mod = (modrm >> 6) & 3;
14ce26e7 4923 rm = (modrm & 7) | REX_B(s);
2c0262af 4924 op = (modrm >> 3) & 7;
3b46e624 4925
2c0262af 4926 if (mod != 3) {
14ce26e7
FB
4927 if (b == 0x83)
4928 s->rip_offset = 1;
4929 else
4930 s->rip_offset = insn_const_size(ot);
0af10c86 4931 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
4932 opreg = OR_TMP0;
4933 } else {
14ce26e7 4934 opreg = rm;
2c0262af
FB
4935 }
4936
4937 switch(b) {
4938 default:
4939 case 0x80:
4940 case 0x81:
d64477af 4941 case 0x82:
0af10c86 4942 val = insn_get(env, s, ot);
2c0262af
FB
4943 break;
4944 case 0x83:
0af10c86 4945 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
4946 break;
4947 }
4948 gen_op_movl_T1_im(val);
4949 gen_op(s, op, ot, opreg);
4950 }
4951 break;
4952
4953 /**************************/
4954 /* inc, dec, and other misc arith */
4955 case 0x40 ... 0x47: /* inc Gv */
4956 ot = dflag ? OT_LONG : OT_WORD;
4957 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4958 break;
4959 case 0x48 ... 0x4f: /* dec Gv */
4960 ot = dflag ? OT_LONG : OT_WORD;
4961 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4962 break;
4963 case 0xf6: /* GRP3 */
4964 case 0xf7:
4965 if ((b & 1) == 0)
4966 ot = OT_BYTE;
4967 else
14ce26e7 4968 ot = dflag + OT_WORD;
2c0262af 4969
0af10c86 4970 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 4971 mod = (modrm >> 6) & 3;
14ce26e7 4972 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
4973 op = (modrm >> 3) & 7;
4974 if (mod != 3) {
14ce26e7
FB
4975 if (op == 0)
4976 s->rip_offset = insn_const_size(ot);
0af10c86 4977 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 4978 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 4979 } else {
57fec1fe 4980 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
4981 }
4982
4983 switch(op) {
4984 case 0: /* test */
0af10c86 4985 val = insn_get(env, s, ot);
2c0262af
FB
4986 gen_op_movl_T1_im(val);
4987 gen_op_testl_T0_T1_cc();
3ca51d07 4988 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af
FB
4989 break;
4990 case 2: /* not */
b6abf97d 4991 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
2c0262af 4992 if (mod != 3) {
57fec1fe 4993 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 4994 } else {
57fec1fe 4995 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
4996 }
4997 break;
4998 case 3: /* neg */
b6abf97d 4999 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
2c0262af 5000 if (mod != 3) {
57fec1fe 5001 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5002 } else {
57fec1fe 5003 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
5004 }
5005 gen_op_update_neg_cc();
3ca51d07 5006 set_cc_op(s, CC_OP_SUBB + ot);
2c0262af
FB
5007 break;
5008 case 4: /* mul */
5009 switch(ot) {
5010 case OT_BYTE:
0211e5af
FB
5011 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5012 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5013 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
5014 /* XXX: use 32 bit mul which could be faster */
5015 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5016 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5017 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5018 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3ca51d07 5019 set_cc_op(s, CC_OP_MULB);
2c0262af
FB
5020 break;
5021 case OT_WORD:
0211e5af
FB
5022 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5023 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5024 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
5025 /* XXX: use 32 bit mul which could be faster */
5026 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5027 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5028 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5029 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5030 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5031 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3ca51d07 5032 set_cc_op(s, CC_OP_MULW);
2c0262af
FB
5033 break;
5034 default:
5035 case OT_LONG:
a4bcea3d
RH
5036 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5037 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
5038 tcg_gen_mulu2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5039 cpu_tmp2_i32, cpu_tmp3_i32);
5040 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], cpu_tmp2_i32);
5041 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
5042 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5043 tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5044 set_cc_op(s, CC_OP_MULL);
2c0262af 5045 break;
14ce26e7
FB
5046#ifdef TARGET_X86_64
5047 case OT_QUAD:
a4bcea3d
RH
5048 tcg_gen_mulu2_i64(cpu_regs[R_EAX], cpu_regs[R_EDX],
5049 cpu_T[0], cpu_regs[R_EAX]);
5050 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5051 tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5052 set_cc_op(s, CC_OP_MULQ);
14ce26e7
FB
5053 break;
5054#endif
2c0262af 5055 }
2c0262af
FB
5056 break;
5057 case 5: /* imul */
5058 switch(ot) {
5059 case OT_BYTE:
0211e5af
FB
5060 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5061 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5062 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5063 /* XXX: use 32 bit mul which could be faster */
5064 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5065 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5066 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5067 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5068 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3ca51d07 5069 set_cc_op(s, CC_OP_MULB);
2c0262af
FB
5070 break;
5071 case OT_WORD:
0211e5af
FB
5072 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5073 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5074 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5075 /* XXX: use 32 bit mul which could be faster */
5076 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5077 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5078 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5079 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5080 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5081 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5082 gen_op_mov_reg_T0(OT_WORD, R_EDX);
3ca51d07 5083 set_cc_op(s, CC_OP_MULW);
2c0262af
FB
5084 break;
5085 default:
5086 case OT_LONG:
a4bcea3d
RH
5087 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5088 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
5089 tcg_gen_muls2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5090 cpu_tmp2_i32, cpu_tmp3_i32);
5091 tcg_gen_extu_i32_tl(cpu_regs[R_EAX], cpu_tmp2_i32);
5092 tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
5093 tcg_gen_sari_i32(cpu_tmp2_i32, cpu_tmp2_i32, 31);
5094 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5095 tcg_gen_sub_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
5096 tcg_gen_extu_i32_tl(cpu_cc_src, cpu_tmp2_i32);
3ca51d07 5097 set_cc_op(s, CC_OP_MULL);
2c0262af 5098 break;
14ce26e7
FB
5099#ifdef TARGET_X86_64
5100 case OT_QUAD:
a4bcea3d
RH
5101 tcg_gen_muls2_i64(cpu_regs[R_EAX], cpu_regs[R_EDX],
5102 cpu_T[0], cpu_regs[R_EAX]);
5103 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
5104 tcg_gen_sari_tl(cpu_cc_src, cpu_regs[R_EAX], 63);
5105 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, cpu_regs[R_EDX]);
3ca51d07 5106 set_cc_op(s, CC_OP_MULQ);
14ce26e7
FB
5107 break;
5108#endif
2c0262af 5109 }
2c0262af
FB
5110 break;
5111 case 6: /* div */
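 /* The divide helpers can raise #DE, so EIP is synced to the start
    of the insn before each call. */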
5112 switch(ot) {
5113 case OT_BYTE:
14ce26e7 5114 gen_jmp_im(pc_start - s->cs_base);
7923057b 5115 gen_helper_divb_AL(cpu_env, cpu_T[0]);
2c0262af
FB
5116 break;
5117 case OT_WORD:
14ce26e7 5118 gen_jmp_im(pc_start - s->cs_base);
7923057b 5119 gen_helper_divw_AX(cpu_env, cpu_T[0]);
2c0262af
FB
5120 break;
5121 default:
5122 case OT_LONG:
14ce26e7 5123 gen_jmp_im(pc_start - s->cs_base);
7923057b 5124 gen_helper_divl_EAX(cpu_env, cpu_T[0]);
14ce26e7
FB
5125 break;
5126#ifdef TARGET_X86_64
5127 case OT_QUAD:
5128 gen_jmp_im(pc_start - s->cs_base);
7923057b 5129 gen_helper_divq_EAX(cpu_env, cpu_T[0]);
2c0262af 5130 break;
14ce26e7 5131#endif
2c0262af
FB
5132 }
5133 break;
5134 case 7: /* idiv */
5135 switch(ot) {
5136 case OT_BYTE:
14ce26e7 5137 gen_jmp_im(pc_start - s->cs_base);
7923057b 5138 gen_helper_idivb_AL(cpu_env, cpu_T[0]);
2c0262af
FB
5139 break;
5140 case OT_WORD:
14ce26e7 5141 gen_jmp_im(pc_start - s->cs_base);
7923057b 5142 gen_helper_idivw_AX(cpu_env, cpu_T[0]);
2c0262af
FB
5143 break;
5144 default:
5145 case OT_LONG:
14ce26e7 5146 gen_jmp_im(pc_start - s->cs_base);
7923057b 5147 gen_helper_idivl_EAX(cpu_env, cpu_T[0]);
14ce26e7
FB
5148 break;
5149#ifdef TARGET_X86_64
5150 case OT_QUAD:
5151 gen_jmp_im(pc_start - s->cs_base);
7923057b 5152 gen_helper_idivq_EAX(cpu_env, cpu_T[0]);
2c0262af 5153 break;
14ce26e7 5154#endif
2c0262af
FB
5155 }
5156 break;
5157 default:
5158 goto illegal_op;
5159 }
5160 break;
5161
5162 case 0xfe: /* GRP4 */
5163 case 0xff: /* GRP5 */
5164 if ((b & 1) == 0)
5165 ot = OT_BYTE;
5166 else
14ce26e7 5167 ot = dflag + OT_WORD;
2c0262af 5168
0af10c86 5169 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5170 mod = (modrm >> 6) & 3;
14ce26e7 5171 rm = (modrm & 7) | REX_B(s);
2c0262af
FB
5172 op = (modrm >> 3) & 7;
5173 if (op >= 2 && b == 0xfe) {
5174 goto illegal_op;
5175 }
14ce26e7 5176 if (CODE64(s)) {
aba9d61e 5177 if (op == 2 || op == 4) {
14ce26e7
FB
5178 /* operand size for jumps is 64 bit */
5179 ot = OT_QUAD;
aba9d61e 5180 } else if (op == 3 || op == 5) {
41b1e61f 5181 ot = dflag ? OT_LONG + (rex_w == 1) : OT_WORD;
14ce26e7
FB
5182 } else if (op == 6) {
5183 /* default push size is 64 bit */
5184 ot = dflag ? OT_QUAD : OT_WORD;
5185 }
5186 }
2c0262af 5187 if (mod != 3) {
0af10c86 5188 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5189 if (op >= 2 && op != 3 && op != 5)
57fec1fe 5190 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 5191 } else {
57fec1fe 5192 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
5193 }
5194
5195 switch(op) {
5196 case 0: /* inc Ev */
5197 if (mod != 3)
5198 opreg = OR_TMP0;
5199 else
5200 opreg = rm;
5201 gen_inc(s, ot, opreg, 1);
5202 break;
5203 case 1: /* dec Ev */
5204 if (mod != 3)
5205 opreg = OR_TMP0;
5206 else
5207 opreg = rm;
5208 gen_inc(s, ot, opreg, -1);
5209 break;
5210 case 2: /* call Ev */
4f31916f 5211 /* XXX: optimize the memory operand case (the 'and' below is unnecessary there, the 16-bit load already zero-extends) */
2c0262af
FB
5212 if (s->dflag == 0)
5213 gen_op_andl_T0_ffff();
2c0262af 5214 next_eip = s->pc - s->cs_base;
1ef38687 5215 gen_movtl_T1_im(next_eip);
4f31916f
FB
5216 gen_push_T1(s);
5217 gen_op_jmp_T0();
2c0262af
FB
5218 gen_eob(s);
5219 break;
61382a50 5220 case 3: /* lcall Ev */
57fec1fe 5221 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5222 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
57fec1fe 5223 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
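/* Far calls through the descriptor table can fault and may change the
   privilege level, so in protected mode the whole operation is done by a
   helper; dflag and the instruction length are passed so the helper can
   push the correct return address and update EIP. */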
5224 do_lcall:
5225 if (s->pe && !s->vm86) {
773cdfcc 5226 gen_update_cc_op(s);
14ce26e7 5227 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 5228 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2
BS
5229 gen_helper_lcall_protected(cpu_env, cpu_tmp2_i32, cpu_T[1],
5230 tcg_const_i32(dflag),
a7812ae4 5231 tcg_const_i32(s->pc - pc_start));
2c0262af 5232 } else {
b6abf97d 5233 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2
BS
5234 gen_helper_lcall_real(cpu_env, cpu_tmp2_i32, cpu_T[1],
5235 tcg_const_i32(dflag),
a7812ae4 5236 tcg_const_i32(s->pc - s->cs_base));
2c0262af
FB
5237 }
5238 gen_eob(s);
5239 break;
5240 case 4: /* jmp Ev */
5241 if (s->dflag == 0)
5242 gen_op_andl_T0_ffff();
5243 gen_op_jmp_T0();
5244 gen_eob(s);
5245 break;
5246 case 5: /* ljmp Ev */
57fec1fe 5247 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5248 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
57fec1fe 5249 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5250 do_ljmp:
5251 if (s->pe && !s->vm86) {
773cdfcc 5252 gen_update_cc_op(s);
14ce26e7 5253 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 5254 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 5255 gen_helper_ljmp_protected(cpu_env, cpu_tmp2_i32, cpu_T[1],
a7812ae4 5256 tcg_const_i32(s->pc - pc_start));
2c0262af 5257 } else {
3bd7da9e 5258 gen_op_movl_seg_T0_vm(R_CS);
2c0262af
FB
5259 gen_op_movl_T0_T1();
5260 gen_op_jmp_T0();
5261 }
5262 gen_eob(s);
5263 break;
5264 case 6: /* push Ev */
5265 gen_push_T0(s);
5266 break;
5267 default:
5268 goto illegal_op;
5269 }
5270 break;
5271
5272 case 0x84: /* test Ev, Gv */
5fafdf24 5273 case 0x85:
2c0262af
FB
5274 if ((b & 1) == 0)
5275 ot = OT_BYTE;
5276 else
14ce26e7 5277 ot = dflag + OT_WORD;
2c0262af 5278
0af10c86 5279 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5280 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5281
0af10c86 5282 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
57fec1fe 5283 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af 5284 gen_op_testl_T0_T1_cc();
3ca51d07 5285 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af 5286 break;
3b46e624 5287
2c0262af
FB
5288 case 0xa8: /* test eAX, Iv */
5289 case 0xa9:
5290 if ((b & 1) == 0)
5291 ot = OT_BYTE;
5292 else
14ce26e7 5293 ot = dflag + OT_WORD;
0af10c86 5294 val = insn_get(env, s, ot);
2c0262af 5295
57fec1fe 5296 gen_op_mov_TN_reg(ot, 0, OR_EAX);
2c0262af
FB
5297 gen_op_movl_T1_im(val);
5298 gen_op_testl_T0_T1_cc();
3ca51d07 5299 set_cc_op(s, CC_OP_LOGICB + ot);
2c0262af 5300 break;
3b46e624 5301
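/* CBW/CWDE/CDQE: sign-extend AL into AX, AX into EAX, or EAX into RAX,
   depending on the operand size selected by dflag. */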
2c0262af 5302 case 0x98: /* CWDE/CBW */
14ce26e7
FB
5303#ifdef TARGET_X86_64
5304 if (dflag == 2) {
e108dd01
FB
5305 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5306 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5307 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
14ce26e7
FB
5308 } else
5309#endif
e108dd01
FB
5310 if (dflag == 1) {
5311 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5312 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5313 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5314 } else {
5315 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5316 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5317 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5318 }
2c0262af
FB
5319 break;
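/* CWD/CDQ/CQO: replicate the sign bit of AX/EAX/RAX into every bit of
   DX/EDX/RDX. */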
5320 case 0x99: /* CDQ/CWD */
14ce26e7
FB
5321#ifdef TARGET_X86_64
5322 if (dflag == 2) {
e108dd01
FB
5323 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5324 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5325 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
14ce26e7
FB
5326 } else
5327#endif
e108dd01
FB
5328 if (dflag == 1) {
5329 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5330 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5331 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5332 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5333 } else {
5334 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5335 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5336 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5337 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5338 }
2c0262af
FB
5339 break;
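/* Two- and three-operand IMUL: the truncated product goes to the
   destination register; as in the one-operand form, CF/OF come from the
   difference between the high half and the sign extension of the low
   half of the full product. */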
5340 case 0x1af: /* imul Gv, Ev */
5341 case 0x69: /* imul Gv, Ev, I */
5342 case 0x6b:
14ce26e7 5343 ot = dflag + OT_WORD;
0af10c86 5344 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
5345 reg = ((modrm >> 3) & 7) | rex_r;
5346 if (b == 0x69)
5347 s->rip_offset = insn_const_size(ot);
5348 else if (b == 0x6b)
5349 s->rip_offset = 1;
0af10c86 5350 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
2c0262af 5351 if (b == 0x69) {
0af10c86 5352 val = insn_get(env, s, ot);
2c0262af
FB
5353 gen_op_movl_T1_im(val);
5354 } else if (b == 0x6b) {
0af10c86 5355 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
5356 gen_op_movl_T1_im(val);
5357 } else {
57fec1fe 5358 gen_op_mov_TN_reg(ot, 1, reg);
2c0262af 5359 }
a4bcea3d 5360 switch (ot) {
0211e5af 5361#ifdef TARGET_X86_64
a4bcea3d
RH
5362 case OT_QUAD:
5363 tcg_gen_muls2_i64(cpu_regs[reg], cpu_T[1], cpu_T[0], cpu_T[1]);
5364 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
5365 tcg_gen_sari_tl(cpu_cc_src, cpu_cc_dst, 63);
5366 tcg_gen_sub_tl(cpu_cc_src, cpu_cc_src, cpu_T[1]);
5367 break;
0211e5af 5368#endif
a4bcea3d
RH
5369 case OT_LONG:
5370 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5371 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5372 tcg_gen_muls2_i32(cpu_tmp2_i32, cpu_tmp3_i32,
5373 cpu_tmp2_i32, cpu_tmp3_i32);
5374 tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp2_i32);
5375 tcg_gen_sari_i32(cpu_tmp2_i32, cpu_tmp2_i32, 31);
5376 tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
5377 tcg_gen_sub_i32(cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
5378 tcg_gen_extu_i32_tl(cpu_cc_src, cpu_tmp2_i32);
5379 break;
5380 default:
0211e5af
FB
5381 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5382 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5383 /* XXX: use 32 bit mul which could be faster */
5384 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5385 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5386 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5387 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
a4bcea3d
RH
5388 gen_op_mov_reg_T0(ot, reg);
5389 break;
2c0262af 5390 }
3ca51d07 5391 set_cc_op(s, CC_OP_MULB + ot);
2c0262af
FB
5392 break;
5393 case 0x1c0:
5394 case 0x1c1: /* xadd Ev, Gv */
5395 if ((b & 1) == 0)
5396 ot = OT_BYTE;
5397 else
14ce26e7 5398 ot = dflag + OT_WORD;
0af10c86 5399 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5400 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5401 mod = (modrm >> 6) & 3;
5402 if (mod == 3) {
14ce26e7 5403 rm = (modrm & 7) | REX_B(s);
57fec1fe
FB
5404 gen_op_mov_TN_reg(ot, 0, reg);
5405 gen_op_mov_TN_reg(ot, 1, rm);
2c0262af 5406 gen_op_addl_T0_T1();
57fec1fe
FB
5407 gen_op_mov_reg_T1(ot, reg);
5408 gen_op_mov_reg_T0(ot, rm);
2c0262af 5409 } else {
0af10c86 5410 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe
FB
5411 gen_op_mov_TN_reg(ot, 0, reg);
5412 gen_op_ld_T1_A0(ot + s->mem_index);
2c0262af 5413 gen_op_addl_T0_T1();
57fec1fe
FB
5414 gen_op_st_T0_A0(ot + s->mem_index);
5415 gen_op_mov_reg_T1(ot, reg);
2c0262af
FB
5416 }
5417 gen_op_update2_cc();
3ca51d07 5418 set_cc_op(s, CC_OP_ADDB + ot);
2c0262af
FB
5419 break;
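/* CMPXCHG: compare the accumulator with Ev; if equal, Ev is replaced by
   Gv, otherwise EAX is loaded from Ev. The flags are those of CMP, hence
   the cc_src/cc_srcT/cc_dst assignments and CC_OP_SUB below. */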
5420 case 0x1b0:
5421 case 0x1b1: /* cmpxchg Ev, Gv */
cad3a37d 5422 {
1130328e 5423 int label1, label2;
1e4840bf 5424 TCGv t0, t1, t2, a0;
cad3a37d
FB
5425
5426 if ((b & 1) == 0)
5427 ot = OT_BYTE;
5428 else
5429 ot = dflag + OT_WORD;
0af10c86 5430 modrm = cpu_ldub_code(env, s->pc++);
cad3a37d
FB
5431 reg = ((modrm >> 3) & 7) | rex_r;
5432 mod = (modrm >> 6) & 3;
a7812ae4
PB
5433 t0 = tcg_temp_local_new();
5434 t1 = tcg_temp_local_new();
5435 t2 = tcg_temp_local_new();
5436 a0 = tcg_temp_local_new();
1e4840bf 5437 gen_op_mov_v_reg(ot, t1, reg);
cad3a37d
FB
5438 if (mod == 3) {
5439 rm = (modrm & 7) | REX_B(s);
1e4840bf 5440 gen_op_mov_v_reg(ot, t0, rm);
cad3a37d 5441 } else {
0af10c86 5442 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
1e4840bf
FB
5443 tcg_gen_mov_tl(a0, cpu_A0);
5444 gen_op_ld_v(ot + s->mem_index, t0, a0);
cad3a37d
FB
5445 rm = 0; /* avoid warning */
5446 }
5447 label1 = gen_new_label();
a3251186
RH
5448 tcg_gen_mov_tl(t2, cpu_regs[R_EAX]);
5449 gen_extu(ot, t0);
1e4840bf 5450 gen_extu(ot, t2);
a3251186 5451 tcg_gen_brcond_tl(TCG_COND_EQ, t2, t0, label1);
f7e80adf 5452 label2 = gen_new_label();
cad3a37d 5453 if (mod == 3) {
1e4840bf 5454 gen_op_mov_reg_v(ot, R_EAX, t0);
1130328e
FB
5455 tcg_gen_br(label2);
5456 gen_set_label(label1);
1e4840bf 5457 gen_op_mov_reg_v(ot, rm, t1);
cad3a37d 5458 } else {
f7e80adf
AG
5459 /* perform no-op store cycle like physical cpu; must be
5460 before changing accumulator to ensure idempotency if
5461 the store faults and the instruction is restarted */
5462 gen_op_st_v(ot + s->mem_index, t0, a0);
1e4840bf 5463 gen_op_mov_reg_v(ot, R_EAX, t0);
f7e80adf 5464 tcg_gen_br(label2);
1130328e 5465 gen_set_label(label1);
1e4840bf 5466 gen_op_st_v(ot + s->mem_index, t1, a0);
cad3a37d 5467 }
f7e80adf 5468 gen_set_label(label2);
1e4840bf 5469 tcg_gen_mov_tl(cpu_cc_src, t0);
a3251186
RH
5470 tcg_gen_mov_tl(cpu_cc_srcT, t2);
5471 tcg_gen_sub_tl(cpu_cc_dst, t2, t0);
3ca51d07 5472 set_cc_op(s, CC_OP_SUBB + ot);
1e4840bf
FB
5473 tcg_temp_free(t0);
5474 tcg_temp_free(t1);
5475 tcg_temp_free(t2);
5476 tcg_temp_free(a0);
2c0262af 5477 }
2c0262af
FB
5478 break;
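/* CMPXCHG8B/CMPXCHG16B are handled entirely by helpers, which perform
   the compare-exchange of memory with EDX:EAX (ECX:EBX as the new value)
   and record ZF in the saved flags, hence CC_OP_EFLAGS. */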
5479 case 0x1c7: /* cmpxchg8b */
0af10c86 5480 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5481 mod = (modrm >> 6) & 3;
71c3558e 5482 if ((mod == 3) || ((modrm & 0x38) != 0x8))
2c0262af 5483 goto illegal_op;
1b9d9ebb
FB
5484#ifdef TARGET_X86_64
5485 if (dflag == 2) {
5486 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5487 goto illegal_op;
5488 gen_jmp_im(pc_start - s->cs_base);
773cdfcc 5489 gen_update_cc_op(s);
0af10c86 5490 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
92fc4b58 5491 gen_helper_cmpxchg16b(cpu_env, cpu_A0);
1b9d9ebb
FB
5492 } else
5493#endif
5494 {
5495 if (!(s->cpuid_features & CPUID_CX8))
5496 goto illegal_op;
5497 gen_jmp_im(pc_start - s->cs_base);
773cdfcc 5498 gen_update_cc_op(s);
0af10c86 5499 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
92fc4b58 5500 gen_helper_cmpxchg8b(cpu_env, cpu_A0);
1b9d9ebb 5501 }
3ca51d07 5502 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 5503 break;
3b46e624 5504
2c0262af
FB
5505 /**************************/
5506 /* push/pop */
5507 case 0x50 ... 0x57: /* push */
57fec1fe 5508 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
2c0262af
FB
5509 gen_push_T0(s);
5510 break;
5511 case 0x58 ... 0x5f: /* pop */
14ce26e7
FB
5512 if (CODE64(s)) {
5513 ot = dflag ? OT_QUAD : OT_WORD;
5514 } else {
5515 ot = dflag + OT_WORD;
5516 }
2c0262af 5517 gen_pop_T0(s);
77729c24 5518 /* NOTE: order is important for pop %sp */
2c0262af 5519 gen_pop_update(s);
57fec1fe 5520 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
2c0262af
FB
5521 break;
5522 case 0x60: /* pusha */
14ce26e7
FB
5523 if (CODE64(s))
5524 goto illegal_op;
2c0262af
FB
5525 gen_pusha(s);
5526 break;
5527 case 0x61: /* popa */
14ce26e7
FB
5528 if (CODE64(s))
5529 goto illegal_op;
2c0262af
FB
5530 gen_popa(s);
5531 break;
5532 case 0x68: /* push Iv */
5533 case 0x6a:
14ce26e7
FB
5534 if (CODE64(s)) {
5535 ot = dflag ? OT_QUAD : OT_WORD;
5536 } else {
5537 ot = dflag + OT_WORD;
5538 }
2c0262af 5539 if (b == 0x68)
0af10c86 5540 val = insn_get(env, s, ot);
2c0262af 5541 else
0af10c86 5542 val = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
5543 gen_op_movl_T0_im(val);
5544 gen_push_T0(s);
5545 break;
5546 case 0x8f: /* pop Ev */
14ce26e7
FB
5547 if (CODE64(s)) {
5548 ot = dflag ? OT_QUAD : OT_WORD;
5549 } else {
5550 ot = dflag + OT_WORD;
5551 }
0af10c86 5552 modrm = cpu_ldub_code(env, s->pc++);
77729c24 5553 mod = (modrm >> 6) & 3;
2c0262af 5554 gen_pop_T0(s);
77729c24
FB
5555 if (mod == 3) {
5556 /* NOTE: order is important for pop %sp */
5557 gen_pop_update(s);
14ce26e7 5558 rm = (modrm & 7) | REX_B(s);
57fec1fe 5559 gen_op_mov_reg_T0(ot, rm);
77729c24
FB
5560 } else {
5561 /* NOTE: order is important too for MMU exceptions */
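/* The destination address of pop Ev must be computed with the value ESP
   has after the pop; popl_esp_hack is the bias applied by gen_lea_modrm
   to ESP-based addressing to get that behaviour. */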
14ce26e7 5562 s->popl_esp_hack = 1 << ot;
0af10c86 5563 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
77729c24
FB
5564 s->popl_esp_hack = 0;
5565 gen_pop_update(s);
5566 }
2c0262af
FB
5567 break;
5568 case 0xc8: /* enter */
5569 {
5570 int level;
0af10c86 5571 val = cpu_lduw_code(env, s->pc);
2c0262af 5572 s->pc += 2;
0af10c86 5573 level = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5574 gen_enter(s, val, level);
5575 }
5576 break;
5577 case 0xc9: /* leave */
5578 /* XXX: exception not precise (ESP is updated before potential exception) */
14ce26e7 5579 if (CODE64(s)) {
57fec1fe
FB
5580 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5581 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
14ce26e7 5582 } else if (s->ss32) {
57fec1fe
FB
5583 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5584 gen_op_mov_reg_T0(OT_LONG, R_ESP);
2c0262af 5585 } else {
57fec1fe
FB
5586 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5587 gen_op_mov_reg_T0(OT_WORD, R_ESP);
2c0262af
FB
5588 }
5589 gen_pop_T0(s);
14ce26e7
FB
5590 if (CODE64(s)) {
5591 ot = dflag ? OT_QUAD : OT_WORD;
5592 } else {
5593 ot = dflag + OT_WORD;
5594 }
57fec1fe 5595 gen_op_mov_reg_T0(ot, R_EBP);
2c0262af
FB
5596 gen_pop_update(s);
5597 break;
5598 case 0x06: /* push es */
5599 case 0x0e: /* push cs */
5600 case 0x16: /* push ss */
5601 case 0x1e: /* push ds */
14ce26e7
FB
5602 if (CODE64(s))
5603 goto illegal_op;
2c0262af
FB
5604 gen_op_movl_T0_seg(b >> 3);
5605 gen_push_T0(s);
5606 break;
5607 case 0x1a0: /* push fs */
5608 case 0x1a8: /* push gs */
5609 gen_op_movl_T0_seg((b >> 3) & 7);
5610 gen_push_T0(s);
5611 break;
5612 case 0x07: /* pop es */
5613 case 0x17: /* pop ss */
5614 case 0x1f: /* pop ds */
14ce26e7
FB
5615 if (CODE64(s))
5616 goto illegal_op;
2c0262af
FB
5617 reg = b >> 3;
5618 gen_pop_T0(s);
5619 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5620 gen_pop_update(s);
5621 if (reg == R_SS) {
a2cc3b24
FB
5622 /* if reg == SS, inhibit interrupts/trace. */
5623 /* If several instructions disable interrupts, only the
5624 _first_ does it */
5625 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 5626 gen_helper_set_inhibit_irq(cpu_env);
2c0262af
FB
5627 s->tf = 0;
5628 }
5629 if (s->is_jmp) {
14ce26e7 5630 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5631 gen_eob(s);
5632 }
5633 break;
5634 case 0x1a1: /* pop fs */
5635 case 0x1a9: /* pop gs */
5636 gen_pop_T0(s);
5637 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5638 gen_pop_update(s);
5639 if (s->is_jmp) {
14ce26e7 5640 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5641 gen_eob(s);
5642 }
5643 break;
5644
5645 /**************************/
5646 /* mov */
5647 case 0x88:
5648 case 0x89: /* mov Gv, Ev */
5649 if ((b & 1) == 0)
5650 ot = OT_BYTE;
5651 else
14ce26e7 5652 ot = dflag + OT_WORD;
0af10c86 5653 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5654 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5655
2c0262af 5656 /* generate a generic store */
0af10c86 5657 gen_ldst_modrm(env, s, modrm, ot, reg, 1);
2c0262af
FB
5658 break;
5659 case 0xc6:
5660 case 0xc7: /* mov Ev, Iv */
5661 if ((b & 1) == 0)
5662 ot = OT_BYTE;
5663 else
14ce26e7 5664 ot = dflag + OT_WORD;
0af10c86 5665 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5666 mod = (modrm >> 6) & 3;
14ce26e7
FB
5667 if (mod != 3) {
5668 s->rip_offset = insn_const_size(ot);
0af10c86 5669 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 5670 }
0af10c86 5671 val = insn_get(env, s, ot);
2c0262af
FB
5672 gen_op_movl_T0_im(val);
5673 if (mod != 3)
57fec1fe 5674 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5675 else
57fec1fe 5676 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
2c0262af
FB
5677 break;
5678 case 0x8a:
5679 case 0x8b: /* mov Ev, Gv */
5680 if ((b & 1) == 0)
5681 ot = OT_BYTE;
5682 else
14ce26e7 5683 ot = OT_WORD + dflag;
0af10c86 5684 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5685 reg = ((modrm >> 3) & 7) | rex_r;
3b46e624 5686
0af10c86 5687 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
57fec1fe 5688 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
5689 break;
5690 case 0x8e: /* mov seg, Gv */
0af10c86 5691 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5692 reg = (modrm >> 3) & 7;
5693 if (reg >= 6 || reg == R_CS)
5694 goto illegal_op;
0af10c86 5695 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
2c0262af
FB
5696 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5697 if (reg == R_SS) {
5698 /* if reg == SS, inhibit interrupts/trace */
a2cc3b24
FB
5699 /* If several instructions disable interrupts, only the
5700 _first_ does it */
5701 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 5702 gen_helper_set_inhibit_irq(cpu_env);
2c0262af
FB
5703 s->tf = 0;
5704 }
5705 if (s->is_jmp) {
14ce26e7 5706 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5707 gen_eob(s);
5708 }
5709 break;
5710 case 0x8c: /* mov Gv, seg */
0af10c86 5711 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5712 reg = (modrm >> 3) & 7;
5713 mod = (modrm >> 6) & 3;
5714 if (reg >= 6)
5715 goto illegal_op;
5716 gen_op_movl_T0_seg(reg);
14ce26e7
FB
5717 if (mod == 3)
5718 ot = OT_WORD + dflag;
5719 else
5720 ot = OT_WORD;
0af10c86 5721 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
5722 break;
5723
5724 case 0x1b6: /* movzbS Gv, Eb */
5725 case 0x1b7: /* movzwS Gv, Eb */
5726 case 0x1be: /* movsbS Gv, Eb */
5727 case 0x1bf: /* movswS Gv, Eb */
5728 {
5729 int d_ot;
5730 /* d_ot is the size of destination */
5731 d_ot = dflag + OT_WORD;
5732 /* ot is the size of source */
5733 ot = (b & 1) + OT_BYTE;
0af10c86 5734 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5735 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 5736 mod = (modrm >> 6) & 3;
14ce26e7 5737 rm = (modrm & 7) | REX_B(s);
3b46e624 5738
2c0262af 5739 if (mod == 3) {
57fec1fe 5740 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
5741 switch(ot | (b & 8)) {
5742 case OT_BYTE:
e108dd01 5743 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5744 break;
5745 case OT_BYTE | 8:
e108dd01 5746 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5747 break;
5748 case OT_WORD:
e108dd01 5749 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5750 break;
5751 default:
5752 case OT_WORD | 8:
e108dd01 5753 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
2c0262af
FB
5754 break;
5755 }
57fec1fe 5756 gen_op_mov_reg_T0(d_ot, reg);
2c0262af 5757 } else {
0af10c86 5758 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5759 if (b & 8) {
57fec1fe 5760 gen_op_lds_T0_A0(ot + s->mem_index);
2c0262af 5761 } else {
57fec1fe 5762 gen_op_ldu_T0_A0(ot + s->mem_index);
2c0262af 5763 }
57fec1fe 5764 gen_op_mov_reg_T0(d_ot, reg);
2c0262af
FB
5765 }
5766 }
5767 break;
5768
5769 case 0x8d: /* lea */
14ce26e7 5770 ot = dflag + OT_WORD;
0af10c86 5771 modrm = cpu_ldub_code(env, s->pc++);
3a1d9b8b
FB
5772 mod = (modrm >> 6) & 3;
5773 if (mod == 3)
5774 goto illegal_op;
14ce26e7 5775 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5776 /* we must ensure that no segment is added */
5777 s->override = -1;
5778 val = s->addseg;
5779 s->addseg = 0;
0af10c86 5780 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 5781 s->addseg = val;
57fec1fe 5782 gen_op_mov_reg_A0(ot - OT_WORD, reg);
2c0262af 5783 break;
3b46e624 5784
2c0262af
FB
5785 case 0xa0: /* mov EAX, Ov */
5786 case 0xa1:
5787 case 0xa2: /* mov Ov, EAX */
5788 case 0xa3:
2c0262af 5789 {
14ce26e7
FB
5790 target_ulong offset_addr;
5791
5792 if ((b & 1) == 0)
5793 ot = OT_BYTE;
5794 else
5795 ot = dflag + OT_WORD;
5796#ifdef TARGET_X86_64
8f091a59 5797 if (s->aflag == 2) {
0af10c86 5798 offset_addr = cpu_ldq_code(env, s->pc);
14ce26e7 5799 s->pc += 8;
57fec1fe 5800 gen_op_movq_A0_im(offset_addr);
5fafdf24 5801 } else
14ce26e7
FB
5802#endif
5803 {
5804 if (s->aflag) {
0af10c86 5805 offset_addr = insn_get(env, s, OT_LONG);
14ce26e7 5806 } else {
0af10c86 5807 offset_addr = insn_get(env, s, OT_WORD);
14ce26e7
FB
5808 }
5809 gen_op_movl_A0_im(offset_addr);
5810 }
664e0f19 5811 gen_add_A0_ds_seg(s);
14ce26e7 5812 if ((b & 2) == 0) {
57fec1fe
FB
5813 gen_op_ld_T0_A0(ot + s->mem_index);
5814 gen_op_mov_reg_T0(ot, R_EAX);
14ce26e7 5815 } else {
57fec1fe
FB
5816 gen_op_mov_TN_reg(ot, 0, R_EAX);
5817 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af
FB
5818 }
5819 }
2c0262af
FB
5820 break;
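/* XLAT: load AL from the byte at seg:[(E/R)BX + zero-extended AL]
   (DS by default, segment override allowed); the masks below apply the
   current address size. */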
5821 case 0xd7: /* xlat */
14ce26e7 5822#ifdef TARGET_X86_64
8f091a59 5823 if (s->aflag == 2) {
57fec1fe 5824 gen_op_movq_A0_reg(R_EBX);
bbf662ee
FB
5825 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5826 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5827 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5fafdf24 5828 } else
14ce26e7
FB
5829#endif
5830 {
57fec1fe 5831 gen_op_movl_A0_reg(R_EBX);
bbf662ee
FB
5832 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5833 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5834 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
14ce26e7
FB
5835 if (s->aflag == 0)
5836 gen_op_andl_A0_ffff();
bbf662ee
FB
5837 else
5838 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
14ce26e7 5839 }
664e0f19 5840 gen_add_A0_ds_seg(s);
57fec1fe
FB
5841 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5842 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
2c0262af
FB
5843 break;
5844 case 0xb0 ... 0xb7: /* mov R, Ib */
0af10c86 5845 val = insn_get(env, s, OT_BYTE);
2c0262af 5846 gen_op_movl_T0_im(val);
57fec1fe 5847 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
2c0262af
FB
5848 break;
5849 case 0xb8 ... 0xbf: /* mov R, Iv */
14ce26e7
FB
5850#ifdef TARGET_X86_64
5851 if (dflag == 2) {
5852 uint64_t tmp;
5853 /* 64 bit case */
0af10c86 5854 tmp = cpu_ldq_code(env, s->pc);
14ce26e7
FB
5855 s->pc += 8;
5856 reg = (b & 7) | REX_B(s);
5857 gen_movtl_T0_im(tmp);
57fec1fe 5858 gen_op_mov_reg_T0(OT_QUAD, reg);
5fafdf24 5859 } else
14ce26e7
FB
5860#endif
5861 {
5862 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 5863 val = insn_get(env, s, ot);
14ce26e7
FB
5864 reg = (b & 7) | REX_B(s);
5865 gen_op_movl_T0_im(val);
57fec1fe 5866 gen_op_mov_reg_T0(ot, reg);
14ce26e7 5867 }
2c0262af
FB
5868 break;
5869
5870 case 0x91 ... 0x97: /* xchg R, EAX */
7418027e 5871 do_xchg_reg_eax:
14ce26e7
FB
5872 ot = dflag + OT_WORD;
5873 reg = (b & 7) | REX_B(s);
2c0262af
FB
5874 rm = R_EAX;
5875 goto do_xchg_reg;
5876 case 0x86:
5877 case 0x87: /* xchg Ev, Gv */
5878 if ((b & 1) == 0)
5879 ot = OT_BYTE;
5880 else
14ce26e7 5881 ot = dflag + OT_WORD;
0af10c86 5882 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5883 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5884 mod = (modrm >> 6) & 3;
5885 if (mod == 3) {
14ce26e7 5886 rm = (modrm & 7) | REX_B(s);
2c0262af 5887 do_xchg_reg:
57fec1fe
FB
5888 gen_op_mov_TN_reg(ot, 0, reg);
5889 gen_op_mov_TN_reg(ot, 1, rm);
5890 gen_op_mov_reg_T0(ot, rm);
5891 gen_op_mov_reg_T1(ot, reg);
2c0262af 5892 } else {
0af10c86 5893 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 5894 gen_op_mov_TN_reg(ot, 0, reg);
2c0262af
FB
5895 /* for xchg, lock is implicit */
5896 if (!(prefixes & PREFIX_LOCK))
a7812ae4 5897 gen_helper_lock();
57fec1fe
FB
5898 gen_op_ld_T1_A0(ot + s->mem_index);
5899 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 5900 if (!(prefixes & PREFIX_LOCK))
a7812ae4 5901 gen_helper_unlock();
57fec1fe 5902 gen_op_mov_reg_T1(ot, reg);
2c0262af
FB
5903 }
5904 break;
5905 case 0xc4: /* les Gv */
701ed211 5906 /* In CODE64 this is VEX3; see above. */
2c0262af
FB
5907 op = R_ES;
5908 goto do_lxx;
5909 case 0xc5: /* lds Gv */
701ed211 5910 /* In CODE64 this is VEX2; see above. */
2c0262af
FB
5911 op = R_DS;
5912 goto do_lxx;
5913 case 0x1b2: /* lss Gv */
5914 op = R_SS;
5915 goto do_lxx;
5916 case 0x1b4: /* lfs Gv */
5917 op = R_FS;
5918 goto do_lxx;
5919 case 0x1b5: /* lgs Gv */
5920 op = R_GS;
5921 do_lxx:
5922 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 5923 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 5924 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af
FB
5925 mod = (modrm >> 6) & 3;
5926 if (mod == 3)
5927 goto illegal_op;
0af10c86 5928 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 5929 gen_op_ld_T1_A0(ot + s->mem_index);
aba9d61e 5930 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
2c0262af 5931 /* load the segment first to handle exceptions properly */
57fec1fe 5932 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
5933 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5934 /* then put the data */
57fec1fe 5935 gen_op_mov_reg_T1(ot, reg);
2c0262af 5936 if (s->is_jmp) {
14ce26e7 5937 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
5938 gen_eob(s);
5939 }
5940 break;
3b46e624 5941
2c0262af
FB
5942 /************************/
5943 /* shifts */
5944 case 0xc0:
5945 case 0xc1:
5946 /* shift Ev,Ib */
5947 shift = 2;
5948 grp2:
5949 {
5950 if ((b & 1) == 0)
5951 ot = OT_BYTE;
5952 else
14ce26e7 5953 ot = dflag + OT_WORD;
3b46e624 5954
0af10c86 5955 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 5956 mod = (modrm >> 6) & 3;
2c0262af 5957 op = (modrm >> 3) & 7;
3b46e624 5958
2c0262af 5959 if (mod != 3) {
14ce26e7
FB
5960 if (shift == 2) {
5961 s->rip_offset = 1;
5962 }
0af10c86 5963 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
5964 opreg = OR_TMP0;
5965 } else {
14ce26e7 5966 opreg = (modrm & 7) | REX_B(s);
2c0262af
FB
5967 }
5968
5969 /* simpler op */
5970 if (shift == 0) {
5971 gen_shift(s, op, ot, opreg, OR_ECX);
5972 } else {
5973 if (shift == 2) {
0af10c86 5974 shift = cpu_ldub_code(env, s->pc++);
2c0262af
FB
5975 }
5976 gen_shifti(s, op, ot, opreg, shift);
5977 }
5978 }
5979 break;
5980 case 0xd0:
5981 case 0xd1:
5982 /* shift Ev,1 */
5983 shift = 1;
5984 goto grp2;
5985 case 0xd2:
5986 case 0xd3:
5987 /* shift Ev,cl */
5988 shift = 0;
5989 goto grp2;
5990
5991 case 0x1a4: /* shld imm */
5992 op = 0;
5993 shift = 1;
5994 goto do_shiftd;
5995 case 0x1a5: /* shld cl */
5996 op = 0;
5997 shift = 0;
5998 goto do_shiftd;
5999 case 0x1ac: /* shrd imm */
6000 op = 1;
6001 shift = 1;
6002 goto do_shiftd;
6003 case 0x1ad: /* shrd cl */
6004 op = 1;
6005 shift = 0;
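/* SHLD/SHRD: T1 supplies the bits shifted in from the second operand;
   the shift count comes either from an immediate byte or from CL. */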
6006 do_shiftd:
14ce26e7 6007 ot = dflag + OT_WORD;
0af10c86 6008 modrm = cpu_ldub_code(env, s->pc++);
2c0262af 6009 mod = (modrm >> 6) & 3;
14ce26e7
FB
6010 rm = (modrm & 7) | REX_B(s);
6011 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 6012 if (mod != 3) {
0af10c86 6013 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
b6abf97d 6014 opreg = OR_TMP0;
2c0262af 6015 } else {
b6abf97d 6016 opreg = rm;
2c0262af 6017 }
57fec1fe 6018 gen_op_mov_TN_reg(ot, 1, reg);
3b46e624 6019
2c0262af 6020 if (shift) {
3b9d3cf1
PB
6021 TCGv imm = tcg_const_tl(cpu_ldub_code(env, s->pc++));
6022 gen_shiftd_rm_T1(s, ot, opreg, op, imm);
6023 tcg_temp_free(imm);
2c0262af 6024 } else {
3b9d3cf1 6025 gen_shiftd_rm_T1(s, ot, opreg, op, cpu_regs[R_ECX]);
2c0262af
FB
6026 }
6027 break;
6028
6029 /************************/
6030 /* floats */
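/* x87 escape opcodes d8..df: the operation is selected by combining the
   low three opcode bits with the reg field of the ModRM byte
   (op = ((b & 7) << 3) | reg); mod != 3 selects the memory forms. */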
5fafdf24 6031 case 0xd8 ... 0xdf:
7eee2a50
FB
6032 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6033 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6034 /* XXX: what to do if illegal op ? */
6035 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6036 break;
6037 }
0af10c86 6038 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
6039 mod = (modrm >> 6) & 3;
6040 rm = modrm & 7;
6041 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
2c0262af
FB
6042 if (mod != 3) {
6043 /* memory op */
0af10c86 6044 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
6045 switch(op) {
6046 case 0x00 ... 0x07: /* fxxxs */
6047 case 0x10 ... 0x17: /* fixxxl */
6048 case 0x20 ... 0x27: /* fxxxl */
6049 case 0x30 ... 0x37: /* fixxx */
6050 {
6051 int op1;
6052 op1 = op & 7;
6053
6054 switch(op >> 4) {
6055 case 0:
ba7cd150 6056 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6057 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6058 gen_helper_flds_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6059 break;
6060 case 1:
ba7cd150 6061 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6062 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6063 gen_helper_fildl_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6064 break;
6065 case 2:
b6abf97d 6066 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6067 (s->mem_index >> 2) - 1);
d3eb5eae 6068 gen_helper_fldl_FT0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6069 break;
6070 case 3:
6071 default:
ba7cd150 6072 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6073 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6074 gen_helper_fildl_FT0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6075 break;
6076 }
3b46e624 6077
a7812ae4 6078 gen_helper_fp_arith_ST0_FT0(op1);
2c0262af
FB
6079 if (op1 == 3) {
6080 /* fcomp needs pop */
d3eb5eae 6081 gen_helper_fpop(cpu_env);
2c0262af
FB
6082 }
6083 }
6084 break;
6085 case 0x08: /* flds */
6086 case 0x0a: /* fsts */
6087 case 0x0b: /* fstps */
465e9838
FB
6088 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6089 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6090 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
2c0262af
FB
6091 switch(op & 7) {
6092 case 0:
6093 switch(op >> 4) {
6094 case 0:
ba7cd150 6095 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6096 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6097 gen_helper_flds_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6098 break;
6099 case 1:
ba7cd150 6100 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
b6abf97d 6101 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6102 gen_helper_fildl_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6103 break;
6104 case 2:
b6abf97d 6105 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6106 (s->mem_index >> 2) - 1);
d3eb5eae 6107 gen_helper_fldl_ST0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6108 break;
6109 case 3:
6110 default:
ba7cd150 6111 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6112 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6113 gen_helper_fildl_ST0(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6114 break;
6115 }
6116 break;
465e9838 6117 case 1:
19e6c4b8 6118 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) must be tested! */
465e9838
FB
6119 switch(op >> 4) {
6120 case 1:
d3eb5eae 6121 gen_helper_fisttl_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6122 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6123 gen_op_st_T0_A0(OT_LONG + s->mem_index);
465e9838
FB
6124 break;
6125 case 2:
d3eb5eae 6126 gen_helper_fisttll_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6127 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6128 (s->mem_index >> 2) - 1);
465e9838
FB
6129 break;
6130 case 3:
6131 default:
d3eb5eae 6132 gen_helper_fistt_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6133 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6134 gen_op_st_T0_A0(OT_WORD + s->mem_index);
19e6c4b8 6135 break;
465e9838 6136 }
d3eb5eae 6137 gen_helper_fpop(cpu_env);
465e9838 6138 break;
2c0262af
FB
6139 default:
6140 switch(op >> 4) {
6141 case 0:
d3eb5eae 6142 gen_helper_fsts_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6143 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6144 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2c0262af
FB
6145 break;
6146 case 1:
d3eb5eae 6147 gen_helper_fistl_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6148 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6149 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2c0262af
FB
6150 break;
6151 case 2:
d3eb5eae 6152 gen_helper_fstl_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6153 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6154 (s->mem_index >> 2) - 1);
2c0262af
FB
6155 break;
6156 case 3:
6157 default:
d3eb5eae 6158 gen_helper_fist_ST0(cpu_tmp2_i32, cpu_env);
b6abf97d 6159 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
ba7cd150 6160 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6161 break;
6162 }
6163 if ((op & 7) == 3)
d3eb5eae 6164 gen_helper_fpop(cpu_env);
2c0262af
FB
6165 break;
6166 }
6167 break;
6168 case 0x0c: /* fldenv mem */
773cdfcc 6169 gen_update_cc_op(s);
19e6c4b8 6170 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6171 gen_helper_fldenv(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6172 break;
6173 case 0x0d: /* fldcw mem */
19e6c4b8 6174 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
b6abf97d 6175 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 6176 gen_helper_fldcw(cpu_env, cpu_tmp2_i32);
2c0262af
FB
6177 break;
6178 case 0x0e: /* fnstenv mem */
773cdfcc 6179 gen_update_cc_op(s);
19e6c4b8 6180 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6181 gen_helper_fstenv(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6182 break;
6183 case 0x0f: /* fnstcw mem */
d3eb5eae 6184 gen_helper_fnstcw(cpu_tmp2_i32, cpu_env);
b6abf97d 6185 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6186 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6187 break;
6188 case 0x1d: /* fldt mem */
773cdfcc 6189 gen_update_cc_op(s);
19e6c4b8 6190 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6191 gen_helper_fldt_ST0(cpu_env, cpu_A0);
2c0262af
FB
6192 break;
6193 case 0x1f: /* fstpt mem */
773cdfcc 6194 gen_update_cc_op(s);
19e6c4b8 6195 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
6196 gen_helper_fstt_ST0(cpu_env, cpu_A0);
6197 gen_helper_fpop(cpu_env);
2c0262af
FB
6198 break;
6199 case 0x2c: /* frstor mem */
773cdfcc 6200 gen_update_cc_op(s);
19e6c4b8 6201 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6202 gen_helper_frstor(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6203 break;
6204 case 0x2e: /* fnsave mem */
773cdfcc 6205 gen_update_cc_op(s);
19e6c4b8 6206 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6207 gen_helper_fsave(cpu_env, cpu_A0, tcg_const_i32(s->dflag));
2c0262af
FB
6208 break;
6209 case 0x2f: /* fnstsw mem */
d3eb5eae 6210 gen_helper_fnstsw(cpu_tmp2_i32, cpu_env);
b6abf97d 6211 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6212 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2c0262af
FB
6213 break;
6214 case 0x3c: /* fbld */
773cdfcc 6215 gen_update_cc_op(s);
19e6c4b8 6216 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6217 gen_helper_fbld_ST0(cpu_env, cpu_A0);
2c0262af
FB
6218 break;
6219 case 0x3e: /* fbstp */
773cdfcc 6220 gen_update_cc_op(s);
19e6c4b8 6221 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
6222 gen_helper_fbst_ST0(cpu_env, cpu_A0);
6223 gen_helper_fpop(cpu_env);
2c0262af
FB
6224 break;
6225 case 0x3d: /* fildll */
b6abf97d 6226 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6227 (s->mem_index >> 2) - 1);
d3eb5eae 6228 gen_helper_fildll_ST0(cpu_env, cpu_tmp1_i64);
2c0262af
FB
6229 break;
6230 case 0x3f: /* fistpll */
d3eb5eae 6231 gen_helper_fistll_ST0(cpu_tmp1_i64, cpu_env);
b6abf97d 6232 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
19e6c4b8 6233 (s->mem_index >> 2) - 1);
d3eb5eae 6234 gen_helper_fpop(cpu_env);
2c0262af
FB
6235 break;
6236 default:
6237 goto illegal_op;
6238 }
6239 } else {
6240 /* register float ops */
6241 opreg = rm;
6242
6243 switch(op) {
6244 case 0x08: /* fld sti */
d3eb5eae
BS
6245 gen_helper_fpush(cpu_env);
6246 gen_helper_fmov_ST0_STN(cpu_env,
6247 tcg_const_i32((opreg + 1) & 7));
2c0262af
FB
6248 break;
6249 case 0x09: /* fxchg sti */
c169c906
FB
6250 case 0x29: /* fxchg4 sti, undocumented op */
6251 case 0x39: /* fxchg7 sti, undocumented op */
d3eb5eae 6252 gen_helper_fxchg_ST0_STN(cpu_env, tcg_const_i32(opreg));
2c0262af
FB
6253 break;
6254 case 0x0a: /* grp d9/2 */
6255 switch(rm) {
6256 case 0: /* fnop */
023fe10d 6257 /* check exceptions (FreeBSD FPU probe) */
773cdfcc 6258 gen_update_cc_op(s);
14ce26e7 6259 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 6260 gen_helper_fwait(cpu_env);
2c0262af
FB
6261 break;
6262 default:
6263 goto illegal_op;
6264 }
6265 break;
6266 case 0x0c: /* grp d9/4 */
6267 switch(rm) {
6268 case 0: /* fchs */
d3eb5eae 6269 gen_helper_fchs_ST0(cpu_env);
2c0262af
FB
6270 break;
6271 case 1: /* fabs */
d3eb5eae 6272 gen_helper_fabs_ST0(cpu_env);
2c0262af
FB
6273 break;
6274 case 4: /* ftst */
d3eb5eae
BS
6275 gen_helper_fldz_FT0(cpu_env);
6276 gen_helper_fcom_ST0_FT0(cpu_env);
2c0262af
FB
6277 break;
6278 case 5: /* fxam */
d3eb5eae 6279 gen_helper_fxam_ST0(cpu_env);
2c0262af
FB
6280 break;
6281 default:
6282 goto illegal_op;
6283 }
6284 break;
6285 case 0x0d: /* grp d9/5 */
6286 {
6287 switch(rm) {
6288 case 0:
d3eb5eae
BS
6289 gen_helper_fpush(cpu_env);
6290 gen_helper_fld1_ST0(cpu_env);
2c0262af
FB
6291 break;
6292 case 1:
d3eb5eae
BS
6293 gen_helper_fpush(cpu_env);
6294 gen_helper_fldl2t_ST0(cpu_env);
2c0262af
FB
6295 break;
6296 case 2:
d3eb5eae
BS
6297 gen_helper_fpush(cpu_env);
6298 gen_helper_fldl2e_ST0(cpu_env);
2c0262af
FB
6299 break;
6300 case 3:
d3eb5eae
BS
6301 gen_helper_fpush(cpu_env);
6302 gen_helper_fldpi_ST0(cpu_env);
2c0262af
FB
6303 break;
6304 case 4:
d3eb5eae
BS
6305 gen_helper_fpush(cpu_env);
6306 gen_helper_fldlg2_ST0(cpu_env);
2c0262af
FB
6307 break;
6308 case 5:
d3eb5eae
BS
6309 gen_helper_fpush(cpu_env);
6310 gen_helper_fldln2_ST0(cpu_env);
2c0262af
FB
6311 break;
6312 case 6:
d3eb5eae
BS
6313 gen_helper_fpush(cpu_env);
6314 gen_helper_fldz_ST0(cpu_env);
2c0262af
FB
6315 break;
6316 default:
6317 goto illegal_op;
6318 }
6319 }
6320 break;
6321 case 0x0e: /* grp d9/6 */
6322 switch(rm) {
6323 case 0: /* f2xm1 */
d3eb5eae 6324 gen_helper_f2xm1(cpu_env);
2c0262af
FB
6325 break;
6326 case 1: /* fyl2x */
d3eb5eae 6327 gen_helper_fyl2x(cpu_env);
2c0262af
FB
6328 break;
6329 case 2: /* fptan */
d3eb5eae 6330 gen_helper_fptan(cpu_env);
2c0262af
FB
6331 break;
6332 case 3: /* fpatan */
d3eb5eae 6333 gen_helper_fpatan(cpu_env);
2c0262af
FB
6334 break;
6335 case 4: /* fxtract */
d3eb5eae 6336 gen_helper_fxtract(cpu_env);
2c0262af
FB
6337 break;
6338 case 5: /* fprem1 */
d3eb5eae 6339 gen_helper_fprem1(cpu_env);
2c0262af
FB
6340 break;
6341 case 6: /* fdecstp */
d3eb5eae 6342 gen_helper_fdecstp(cpu_env);
2c0262af
FB
6343 break;
6344 default:
6345 case 7: /* fincstp */
d3eb5eae 6346 gen_helper_fincstp(cpu_env);
2c0262af
FB
6347 break;
6348 }
6349 break;
6350 case 0x0f: /* grp d9/7 */
6351 switch(rm) {
6352 case 0: /* fprem */
d3eb5eae 6353 gen_helper_fprem(cpu_env);
2c0262af
FB
6354 break;
6355 case 1: /* fyl2xp1 */
d3eb5eae 6356 gen_helper_fyl2xp1(cpu_env);
2c0262af
FB
6357 break;
6358 case 2: /* fsqrt */
d3eb5eae 6359 gen_helper_fsqrt(cpu_env);
2c0262af
FB
6360 break;
6361 case 3: /* fsincos */
d3eb5eae 6362 gen_helper_fsincos(cpu_env);
2c0262af
FB
6363 break;
6364 case 5: /* fscale */
d3eb5eae 6365 gen_helper_fscale(cpu_env);
2c0262af
FB
6366 break;
6367 case 4: /* frndint */
d3eb5eae 6368 gen_helper_frndint(cpu_env);
2c0262af
FB
6369 break;
6370 case 6: /* fsin */
d3eb5eae 6371 gen_helper_fsin(cpu_env);
2c0262af
FB
6372 break;
6373 default:
6374 case 7: /* fcos */
d3eb5eae 6375 gen_helper_fcos(cpu_env);
2c0262af
FB
6376 break;
6377 }
6378 break;
6379 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6380 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6381 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6382 {
6383 int op1;
3b46e624 6384
2c0262af
FB
6385 op1 = op & 7;
6386 if (op >= 0x20) {
a7812ae4 6387 gen_helper_fp_arith_STN_ST0(op1, opreg);
2c0262af 6388 if (op >= 0x30)
d3eb5eae 6389 gen_helper_fpop(cpu_env);
2c0262af 6390 } else {
d3eb5eae 6391 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
a7812ae4 6392 gen_helper_fp_arith_ST0_FT0(op1);
2c0262af
FB
6393 }
6394 }
6395 break;
6396 case 0x02: /* fcom */
c169c906 6397 case 0x22: /* fcom2, undocumented op */
d3eb5eae
BS
6398 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6399 gen_helper_fcom_ST0_FT0(cpu_env);
2c0262af
FB
6400 break;
6401 case 0x03: /* fcomp */
c169c906
FB
6402 case 0x23: /* fcomp3, undocumented op */
6403 case 0x32: /* fcomp5, undocumented op */
d3eb5eae
BS
6404 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6405 gen_helper_fcom_ST0_FT0(cpu_env);
6406 gen_helper_fpop(cpu_env);
2c0262af
FB
6407 break;
6408 case 0x15: /* da/5 */
6409 switch(rm) {
6410 case 1: /* fucompp */
d3eb5eae
BS
6411 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(1));
6412 gen_helper_fucom_ST0_FT0(cpu_env);
6413 gen_helper_fpop(cpu_env);
6414 gen_helper_fpop(cpu_env);
2c0262af
FB
6415 break;
6416 default:
6417 goto illegal_op;
6418 }
6419 break;
6420 case 0x1c:
6421 switch(rm) {
6422 case 0: /* feni (287 only, just do nop here) */
6423 break;
6424 case 1: /* fdisi (287 only, just do nop here) */
6425 break;
6426 case 2: /* fclex */
d3eb5eae 6427 gen_helper_fclex(cpu_env);
2c0262af
FB
6428 break;
6429 case 3: /* fninit */
d3eb5eae 6430 gen_helper_fninit(cpu_env);
2c0262af
FB
6431 break;
6432 case 4: /* fsetpm (287 only, just do nop here) */
6433 break;
6434 default:
6435 goto illegal_op;
6436 }
6437 break;
6438 case 0x1d: /* fucomi */
bff93281
PM
6439 if (!(s->cpuid_features & CPUID_CMOV)) {
6440 goto illegal_op;
6441 }
773cdfcc 6442 gen_update_cc_op(s);
d3eb5eae
BS
6443 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6444 gen_helper_fucomi_ST0_FT0(cpu_env);
3ca51d07 6445 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6446 break;
6447 case 0x1e: /* fcomi */
bff93281
PM
6448 if (!(s->cpuid_features & CPUID_CMOV)) {
6449 goto illegal_op;
6450 }
773cdfcc 6451 gen_update_cc_op(s);
d3eb5eae
BS
6452 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6453 gen_helper_fcomi_ST0_FT0(cpu_env);
3ca51d07 6454 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 6455 break;
658c8bda 6456 case 0x28: /* ffree sti */
d3eb5eae 6457 gen_helper_ffree_STN(cpu_env, tcg_const_i32(opreg));
5fafdf24 6458 break;
2c0262af 6459 case 0x2a: /* fst sti */
d3eb5eae 6460 gen_helper_fmov_STN_ST0(cpu_env, tcg_const_i32(opreg));
2c0262af
FB
6461 break;
6462 case 0x2b: /* fstp sti */
c169c906
FB
6463 case 0x0b: /* fstp1 sti, undocumented op */
6464 case 0x3a: /* fstp8 sti, undocumented op */
6465 case 0x3b: /* fstp9 sti, undocumented op */
d3eb5eae
BS
6466 gen_helper_fmov_STN_ST0(cpu_env, tcg_const_i32(opreg));
6467 gen_helper_fpop(cpu_env);
2c0262af
FB
6468 break;
6469 case 0x2c: /* fucom st(i) */
d3eb5eae
BS
6470 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6471 gen_helper_fucom_ST0_FT0(cpu_env);
2c0262af
FB
6472 break;
6473 case 0x2d: /* fucomp st(i) */
d3eb5eae
BS
6474 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6475 gen_helper_fucom_ST0_FT0(cpu_env);
6476 gen_helper_fpop(cpu_env);
2c0262af
FB
6477 break;
6478 case 0x33: /* de/3 */
6479 switch(rm) {
6480 case 1: /* fcompp */
d3eb5eae
BS
6481 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(1));
6482 gen_helper_fcom_ST0_FT0(cpu_env);
6483 gen_helper_fpop(cpu_env);
6484 gen_helper_fpop(cpu_env);
2c0262af
FB
6485 break;
6486 default:
6487 goto illegal_op;
6488 }
6489 break;
c169c906 6490 case 0x38: /* ffreep sti, undocumented op */
d3eb5eae
BS
6491 gen_helper_ffree_STN(cpu_env, tcg_const_i32(opreg));
6492 gen_helper_fpop(cpu_env);
c169c906 6493 break;
2c0262af
FB
6494 case 0x3c: /* df/4 */
6495 switch(rm) {
6496 case 0:
d3eb5eae 6497 gen_helper_fnstsw(cpu_tmp2_i32, cpu_env);
b6abf97d 6498 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
19e6c4b8 6499 gen_op_mov_reg_T0(OT_WORD, R_EAX);
2c0262af
FB
6500 break;
6501 default:
6502 goto illegal_op;
6503 }
6504 break;
6505 case 0x3d: /* fucomip */
bff93281
PM
6506 if (!(s->cpuid_features & CPUID_CMOV)) {
6507 goto illegal_op;
6508 }
773cdfcc 6509 gen_update_cc_op(s);
d3eb5eae
BS
6510 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6511 gen_helper_fucomi_ST0_FT0(cpu_env);
6512 gen_helper_fpop(cpu_env);
3ca51d07 6513 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6514 break;
6515 case 0x3e: /* fcomip */
bff93281
PM
6516 if (!(s->cpuid_features & CPUID_CMOV)) {
6517 goto illegal_op;
6518 }
773cdfcc 6519 gen_update_cc_op(s);
d3eb5eae
BS
6520 gen_helper_fmov_FT0_STN(cpu_env, tcg_const_i32(opreg));
6521 gen_helper_fcomi_ST0_FT0(cpu_env);
6522 gen_helper_fpop(cpu_env);
3ca51d07 6523 set_cc_op(s, CC_OP_EFLAGS);
2c0262af 6524 break;
a2cc3b24
FB
6525 case 0x10 ... 0x13: /* fcmovxx */
6526 case 0x18 ... 0x1b:
6527 {
19e6c4b8 6528 int op1, l1;
d70040bc 6529 static const uint8_t fcmov_cc[8] = {
a2cc3b24
FB
6530 (JCC_B << 1),
6531 (JCC_Z << 1),
6532 (JCC_BE << 1),
6533 (JCC_P << 1),
6534 };
bff93281
PM
6535
6536 if (!(s->cpuid_features & CPUID_CMOV)) {
6537 goto illegal_op;
6538 }
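/* Bit 0 of a jcc condition code negates it; the fcmov condition is
   inverted here so that the branch below skips the register move
   whenever the move should not take place. */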
1e4840bf 6539 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
19e6c4b8 6540 l1 = gen_new_label();
dc259201 6541 gen_jcc1_noeob(s, op1, l1);
d3eb5eae 6542 gen_helper_fmov_ST0_STN(cpu_env, tcg_const_i32(opreg));
19e6c4b8 6543 gen_set_label(l1);
a2cc3b24
FB
6544 }
6545 break;
2c0262af
FB
6546 default:
6547 goto illegal_op;
6548 }
6549 }
6550 break;
6551 /************************/
6552 /* string ops */
6553
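/* For the string instructions, a REPZ/REPNZ prefix makes the generator
   emit one iteration guarded by an (E)CX test plus a jump back to the
   instruction itself, so the repeat loop runs by re-entering the
   translated code rather than as an inline loop. */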
6554 case 0xa4: /* movsS */
6555 case 0xa5:
6556 if ((b & 1) == 0)
6557 ot = OT_BYTE;
6558 else
14ce26e7 6559 ot = dflag + OT_WORD;
2c0262af
FB
6560
6561 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6562 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6563 } else {
6564 gen_movs(s, ot);
6565 }
6566 break;
3b46e624 6567
2c0262af
FB
6568 case 0xaa: /* stosS */
6569 case 0xab:
6570 if ((b & 1) == 0)
6571 ot = OT_BYTE;
6572 else
14ce26e7 6573 ot = dflag + OT_WORD;
2c0262af
FB
6574
6575 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6576 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6577 } else {
6578 gen_stos(s, ot);
6579 }
6580 break;
6581 case 0xac: /* lodsS */
6582 case 0xad:
6583 if ((b & 1) == 0)
6584 ot = OT_BYTE;
6585 else
14ce26e7 6586 ot = dflag + OT_WORD;
2c0262af
FB
6587 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6588 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6589 } else {
6590 gen_lods(s, ot);
6591 }
6592 break;
6593 case 0xae: /* scasS */
6594 case 0xaf:
6595 if ((b & 1) == 0)
6596 ot = OT_BYTE;
6597 else
14ce26e7 6598 ot = dflag + OT_WORD;
2c0262af
FB
6599 if (prefixes & PREFIX_REPNZ) {
6600 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6601 } else if (prefixes & PREFIX_REPZ) {
6602 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6603 } else {
6604 gen_scas(s, ot);
2c0262af
FB
6605 }
6606 break;
6607
6608 case 0xa6: /* cmpsS */
6609 case 0xa7:
6610 if ((b & 1) == 0)
6611 ot = OT_BYTE;
6612 else
14ce26e7 6613 ot = dflag + OT_WORD;
2c0262af
FB
6614 if (prefixes & PREFIX_REPNZ) {
6615 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6616 } else if (prefixes & PREFIX_REPZ) {
6617 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6618 } else {
6619 gen_cmps(s, ot);
2c0262af
FB
6620 }
6621 break;
6622 case 0x6c: /* insS */
6623 case 0x6d:
f115e911
FB
6624 if ((b & 1) == 0)
6625 ot = OT_BYTE;
6626 else
6627 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6628 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
0573fbfc 6629 gen_op_andl_T0_ffff();
b8b6a50b
FB
6630 gen_check_io(s, ot, pc_start - s->cs_base,
6631 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
f115e911
FB
6632 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6633 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
2c0262af 6634 } else {
f115e911 6635 gen_ins(s, ot);
2e70f6ef
PB
6636 if (use_icount) {
6637 gen_jmp(s, s->pc - s->cs_base);
6638 }
2c0262af
FB
6639 }
6640 break;
6641 case 0x6e: /* outsS */
6642 case 0x6f:
f115e911
FB
6643 if ((b & 1) == 0)
6644 ot = OT_BYTE;
6645 else
6646 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6647 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
0573fbfc 6648 gen_op_andl_T0_ffff();
b8b6a50b
FB
6649 gen_check_io(s, ot, pc_start - s->cs_base,
6650 svm_is_rep(prefixes) | 4);
f115e911
FB
6651 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6652 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
2c0262af 6653 } else {
f115e911 6654 gen_outs(s, ot);
2e70f6ef
PB
6655 if (use_icount) {
6656 gen_jmp(s, s->pc - s->cs_base);
6657 }
2c0262af
FB
6658 }
6659 break;
6660
6661 /************************/
6662 /* port I/O */
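/* For all IN/OUT forms, gen_check_io validates the access against the
   I/O permission bitmap (and the SVM IOIO intercept) before the port is
   touched. With icount enabled the device access is bracketed by
   gen_io_start()/gen_io_end() and the block ends with a jump, so the
   I/O happens at a deterministic point in the instruction count. */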
0573fbfc 6663
2c0262af
FB
6664 case 0xe4:
6665 case 0xe5:
f115e911
FB
6666 if ((b & 1) == 0)
6667 ot = OT_BYTE;
6668 else
6669 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 6670 val = cpu_ldub_code(env, s->pc++);
f115e911 6671 gen_op_movl_T0_im(val);
b8b6a50b
FB
6672 gen_check_io(s, ot, pc_start - s->cs_base,
6673 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
2e70f6ef
PB
6674 if (use_icount)
6675 gen_io_start();
b6abf97d 6676 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 6677 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
57fec1fe 6678 gen_op_mov_reg_T1(ot, R_EAX);
2e70f6ef
PB
6679 if (use_icount) {
6680 gen_io_end();
6681 gen_jmp(s, s->pc - s->cs_base);
6682 }
2c0262af
FB
6683 break;
6684 case 0xe6:
6685 case 0xe7:
f115e911
FB
6686 if ((b & 1) == 0)
6687 ot = OT_BYTE;
6688 else
6689 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 6690 val = cpu_ldub_code(env, s->pc++);
f115e911 6691 gen_op_movl_T0_im(val);
b8b6a50b
FB
6692 gen_check_io(s, ot, pc_start - s->cs_base,
6693 svm_is_rep(prefixes));
57fec1fe 6694 gen_op_mov_TN_reg(ot, 1, R_EAX);
b8b6a50b 6695
2e70f6ef
PB
6696 if (use_icount)
6697 gen_io_start();
b6abf97d 6698 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
b6abf97d 6699 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
a7812ae4 6700 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
2e70f6ef
PB
6701 if (use_icount) {
6702 gen_io_end();
6703 gen_jmp(s, s->pc - s->cs_base);
6704 }
2c0262af
FB
6705 break;
6706 case 0xec:
6707 case 0xed:
f115e911
FB
6708 if ((b & 1) == 0)
6709 ot = OT_BYTE;
6710 else
6711 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6712 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4f31916f 6713 gen_op_andl_T0_ffff();
b8b6a50b
FB
6714 gen_check_io(s, ot, pc_start - s->cs_base,
6715 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
2e70f6ef
PB
6716 if (use_icount)
6717 gen_io_start();
b6abf97d 6718 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
a7812ae4 6719 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
57fec1fe 6720 gen_op_mov_reg_T1(ot, R_EAX);
2e70f6ef
PB
6721 if (use_icount) {
6722 gen_io_end();
6723 gen_jmp(s, s->pc - s->cs_base);
6724 }
2c0262af
FB
6725 break;
6726 case 0xee:
6727 case 0xef:
f115e911
FB
6728 if ((b & 1) == 0)
6729 ot = OT_BYTE;
6730 else
6731 ot = dflag ? OT_LONG : OT_WORD;
57fec1fe 6732 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4f31916f 6733 gen_op_andl_T0_ffff();
b8b6a50b
FB
6734 gen_check_io(s, ot, pc_start - s->cs_base,
6735 svm_is_rep(prefixes));
57fec1fe 6736 gen_op_mov_TN_reg(ot, 1, R_EAX);
b8b6a50b 6737
2e70f6ef
PB
6738 if (use_icount)
6739 gen_io_start();
b6abf97d 6740 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
b6abf97d 6741 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
a7812ae4 6742 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
2e70f6ef
PB
6743 if (use_icount) {
6744 gen_io_end();
6745 gen_jmp(s, s->pc - s->cs_base);
6746 }
2c0262af
FB
6747 break;
6748
6749 /************************/
6750 /* control */
6751 case 0xc2: /* ret im */
0af10c86 6752 val = cpu_ldsw_code(env, s->pc);
2c0262af
FB
6753 s->pc += 2;
6754 gen_pop_T0(s);
8f091a59
FB
6755 if (CODE64(s) && s->dflag)
6756 s->dflag = 2;
2c0262af
FB
6757 gen_stack_update(s, val + (2 << s->dflag));
6758 if (s->dflag == 0)
6759 gen_op_andl_T0_ffff();
6760 gen_op_jmp_T0();
6761 gen_eob(s);
6762 break;
6763 case 0xc3: /* ret */
6764 gen_pop_T0(s);
6765 gen_pop_update(s);
6766 if (s->dflag == 0)
6767 gen_op_andl_T0_ffff();
6768 gen_op_jmp_T0();
6769 gen_eob(s);
6770 break;
6771 case 0xca: /* lret im */
0af10c86 6772 val = cpu_ldsw_code(env, s->pc);
2c0262af
FB
6773 s->pc += 2;
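/* Far returns in protected mode go through a helper (privilege checks,
   possible stack switch); in real/vm86 mode EIP and CS are simply popped
   here and the immediate is added to the stack pointer. */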
6774 do_lret:
6775 if (s->pe && !s->vm86) {
773cdfcc 6776 gen_update_cc_op(s);
14ce26e7 6777 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 6778 gen_helper_lret_protected(cpu_env, tcg_const_i32(s->dflag),
a7812ae4 6779 tcg_const_i32(val));
2c0262af
FB
6780 } else {
6781 gen_stack_A0(s);
6782 /* pop offset */
57fec1fe 6783 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
2c0262af
FB
6784 if (s->dflag == 0)
6785 gen_op_andl_T0_ffff();
6786 /* NOTE: keeping EIP updated is not a problem in case of
6787 exception */
6788 gen_op_jmp_T0();
6789 /* pop selector */
6790 gen_op_addl_A0_im(2 << s->dflag);
57fec1fe 6791 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
3bd7da9e 6792 gen_op_movl_seg_T0_vm(R_CS);
2c0262af
FB
6793 /* add stack offset */
6794 gen_stack_update(s, val + (4 << s->dflag));
6795 }
6796 gen_eob(s);
6797 break;
6798 case 0xcb: /* lret */
6799 val = 0;
6800 goto do_lret;
6801 case 0xcf: /* iret */
872929aa 6802 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
2c0262af
FB
6803 if (!s->pe) {
6804 /* real mode */
2999a0b2 6805 gen_helper_iret_real(cpu_env, tcg_const_i32(s->dflag));
3ca51d07 6806 set_cc_op(s, CC_OP_EFLAGS);
f115e911
FB
6807 } else if (s->vm86) {
6808 if (s->iopl != 3) {
6809 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6810 } else {
2999a0b2 6811 gen_helper_iret_real(cpu_env, tcg_const_i32(s->dflag));
3ca51d07 6812 set_cc_op(s, CC_OP_EFLAGS);
f115e911 6813 }
2c0262af 6814 } else {
773cdfcc 6815 gen_update_cc_op(s);
14ce26e7 6816 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 6817 gen_helper_iret_protected(cpu_env, tcg_const_i32(s->dflag),
a7812ae4 6818 tcg_const_i32(s->pc - s->cs_base));
3ca51d07 6819 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
6820 }
6821 gen_eob(s);
6822 break;
6823 case 0xe8: /* call im */
6824 {
14ce26e7 6825 if (dflag)
0af10c86 6826 tval = (int32_t)insn_get(env, s, OT_LONG);
14ce26e7 6827 else
0af10c86 6828 tval = (int16_t)insn_get(env, s, OT_WORD);
2c0262af 6829 next_eip = s->pc - s->cs_base;
14ce26e7 6830 tval += next_eip;
2c0262af 6831 if (s->dflag == 0)
14ce26e7 6832 tval &= 0xffff;
99596385
AJ
6833 else if(!CODE64(s))
6834 tval &= 0xffffffff;
14ce26e7 6835 gen_movtl_T0_im(next_eip);
2c0262af 6836 gen_push_T0(s);
14ce26e7 6837 gen_jmp(s, tval);
2c0262af
FB
6838 }
6839 break;
6840 case 0x9a: /* lcall im */
6841 {
6842 unsigned int selector, offset;
3b46e624 6843
14ce26e7
FB
6844 if (CODE64(s))
6845 goto illegal_op;
2c0262af 6846 ot = dflag ? OT_LONG : OT_WORD;
0af10c86
BS
6847 offset = insn_get(env, s, ot);
6848 selector = insn_get(env, s, OT_WORD);
3b46e624 6849
2c0262af 6850 gen_op_movl_T0_im(selector);
14ce26e7 6851 gen_op_movl_T1_imu(offset);
2c0262af
FB
6852 }
6853 goto do_lcall;
ecada8a2 6854 case 0xe9: /* jmp im */
14ce26e7 6855 if (dflag)
0af10c86 6856 tval = (int32_t)insn_get(env, s, OT_LONG);
14ce26e7 6857 else
0af10c86 6858 tval = (int16_t)insn_get(env, s, OT_WORD);
14ce26e7 6859 tval += s->pc - s->cs_base;
2c0262af 6860 if (s->dflag == 0)
14ce26e7 6861 tval &= 0xffff;
32938e12
AJ
6862 else if(!CODE64(s))
6863 tval &= 0xffffffff;
14ce26e7 6864 gen_jmp(s, tval);
2c0262af
FB
6865 break;
6866 case 0xea: /* ljmp im */
6867 {
6868 unsigned int selector, offset;
6869
14ce26e7
FB
6870 if (CODE64(s))
6871 goto illegal_op;
2c0262af 6872 ot = dflag ? OT_LONG : OT_WORD;
0af10c86
BS
6873 offset = insn_get(env, s, ot);
6874 selector = insn_get(env, s, OT_WORD);
3b46e624 6875
2c0262af 6876 gen_op_movl_T0_im(selector);
14ce26e7 6877 gen_op_movl_T1_imu(offset);
2c0262af
FB
6878 }
6879 goto do_ljmp;
6880 case 0xeb: /* jmp Jb */
0af10c86 6881 tval = (int8_t)insn_get(env, s, OT_BYTE);
14ce26e7 6882 tval += s->pc - s->cs_base;
2c0262af 6883 if (s->dflag == 0)
14ce26e7
FB
6884 tval &= 0xffff;
6885 gen_jmp(s, tval);
2c0262af
FB
6886 break;
6887 case 0x70 ... 0x7f: /* jcc Jb */
0af10c86 6888 tval = (int8_t)insn_get(env, s, OT_BYTE);
2c0262af
FB
6889 goto do_jcc;
6890 case 0x180 ... 0x18f: /* jcc Jv */
6891 if (dflag) {
0af10c86 6892 tval = (int32_t)insn_get(env, s, OT_LONG);
2c0262af 6893 } else {
0af10c86 6894 tval = (int16_t)insn_get(env, s, OT_WORD);
2c0262af
FB
6895 }
6896 do_jcc:
6897 next_eip = s->pc - s->cs_base;
14ce26e7 6898 tval += next_eip;
2c0262af 6899 if (s->dflag == 0)
14ce26e7
FB
6900 tval &= 0xffff;
6901 gen_jcc(s, b, tval, next_eip);
2c0262af
FB
6902 break;
6903
6904 case 0x190 ... 0x19f: /* setcc Gv */
0af10c86 6905 modrm = cpu_ldub_code(env, s->pc++);
cc8b6f5b 6906 gen_setcc1(s, b, cpu_T[0]);
0af10c86 6907 gen_ldst_modrm(env, s, modrm, OT_BYTE, OR_TMP0, 1);
2c0262af
FB
6908 break;
6909 case 0x140 ... 0x14f: /* cmov Gv, Ev */
bff93281
PM
6910 if (!(s->cpuid_features & CPUID_CMOV)) {
6911 goto illegal_op;
6912 }
f32d3781
PB
6913 ot = dflag + OT_WORD;
6914 modrm = cpu_ldub_code(env, s->pc++);
6915 reg = ((modrm >> 3) & 7) | rex_r;
6916 gen_cmovcc1(env, s, ot, b, modrm, reg);
2c0262af 6917 break;
3b46e624 6918
2c0262af
FB
6919 /************************/
6920 /* flags */
6921 case 0x9c: /* pushf */
872929aa 6922 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
2c0262af
FB
6923 if (s->vm86 && s->iopl != 3) {
6924 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6925 } else {
773cdfcc 6926 gen_update_cc_op(s);
f0967a1a 6927 gen_helper_read_eflags(cpu_T[0], cpu_env);
2c0262af
FB
6928 gen_push_T0(s);
6929 }
6930 break;
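/* POPF: the set of EFLAGS bits that may be modified depends on the
   privilege level: IOPL can only change at CPL 0 and IF only when
   CPL <= IOPL; in vm86 mode with IOPL != 3 the instruction faults. */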
6931 case 0x9d: /* popf */
872929aa 6932 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
2c0262af
FB
6933 if (s->vm86 && s->iopl != 3) {
6934 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6935 } else {
6936 gen_pop_T0(s);
6937 if (s->cpl == 0) {
6938 if (s->dflag) {
f0967a1a
BS
6939 gen_helper_write_eflags(cpu_env, cpu_T[0],
6940 tcg_const_i32((TF_MASK | AC_MASK |
6941 ID_MASK | NT_MASK |
6942 IF_MASK |
6943 IOPL_MASK)));
2c0262af 6944 } else {
f0967a1a
BS
6945 gen_helper_write_eflags(cpu_env, cpu_T[0],
6946 tcg_const_i32((TF_MASK | AC_MASK |
6947 ID_MASK | NT_MASK |
6948 IF_MASK | IOPL_MASK)
6949 & 0xffff));
2c0262af
FB
6950 }
6951 } else {
4136f33c
FB
6952 if (s->cpl <= s->iopl) {
6953 if (s->dflag) {
f0967a1a
BS
6954 gen_helper_write_eflags(cpu_env, cpu_T[0],
6955 tcg_const_i32((TF_MASK |
6956 AC_MASK |
6957 ID_MASK |
6958 NT_MASK |
6959 IF_MASK)));
4136f33c 6960 } else {
f0967a1a
BS
6961 gen_helper_write_eflags(cpu_env, cpu_T[0],
6962 tcg_const_i32((TF_MASK |
6963 AC_MASK |
6964 ID_MASK |
6965 NT_MASK |
6966 IF_MASK)
6967 & 0xffff));
4136f33c 6968 }
2c0262af 6969 } else {
4136f33c 6970 if (s->dflag) {
f0967a1a
BS
6971 gen_helper_write_eflags(cpu_env, cpu_T[0],
6972 tcg_const_i32((TF_MASK | AC_MASK |
6973 ID_MASK | NT_MASK)));
4136f33c 6974 } else {
f0967a1a
BS
6975 gen_helper_write_eflags(cpu_env, cpu_T[0],
6976 tcg_const_i32((TF_MASK | AC_MASK |
6977 ID_MASK | NT_MASK)
6978 & 0xffff));
4136f33c 6979 }
2c0262af
FB
6980 }
6981 }
6982 gen_pop_update(s);
3ca51d07 6983 set_cc_op(s, CC_OP_EFLAGS);
a9321a4d 6984 /* abort translation because TF/AC flag may change */
14ce26e7 6985 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
6986 gen_eob(s);
6987 }
6988 break;
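        /* For reference: the EFLAGS write mask used by the popf paths
           above depends on CPL/IOPL and on operand size.  Illustrative
           sketch only (hypothetical helper, not part of the translator;
           the *_MASK constants are the ones used above):
        */
#if 0
static unsigned int popf_write_mask(int cpl, int iopl, int data32)
{
    unsigned int mask = TF_MASK | AC_MASK | ID_MASK | NT_MASK;

    if (cpl == 0) {
        mask |= IF_MASK | IOPL_MASK;    /* ring 0 may change IF and IOPL */
    } else if (cpl <= iopl) {
        mask |= IF_MASK;                /* sufficient IOPL may change IF */
    }
    if (!data32) {
        mask &= 0xffff;                 /* 16-bit popf writes FLAGS only */
    }
    return mask;
}
#endif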
6989 case 0x9e: /* sahf */
12e26b75 6990 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
14ce26e7 6991 goto illegal_op;
57fec1fe 6992 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
d229edce 6993 gen_compute_eflags(s);
bd7a7b33
FB
6994 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6995 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6996 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
2c0262af
FB
6997 break;
6998 case 0x9f: /* lahf */
12e26b75 6999 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
14ce26e7 7000 goto illegal_op;
d229edce 7001 gen_compute_eflags(s);
bd7a7b33 7002 /* Note: gen_compute_eflags() only gives the condition codes */
d229edce 7003 tcg_gen_ori_tl(cpu_T[0], cpu_cc_src, 0x02);
57fec1fe 7004 gen_op_mov_reg_T0(OT_BYTE, R_AH);
2c0262af
FB
7005 break;
7006 case 0xf5: /* cmc */
d229edce 7007 gen_compute_eflags(s);
bd7a7b33 7008 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
2c0262af
FB
7009 break;
7010 case 0xf8: /* clc */
d229edce 7011 gen_compute_eflags(s);
bd7a7b33 7012 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
2c0262af
FB
7013 break;
7014 case 0xf9: /* stc */
d229edce 7015 gen_compute_eflags(s);
bd7a7b33 7016 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
2c0262af
FB
7017 break;
7018 case 0xfc: /* cld */
b6abf97d 7019 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
317ac620 7020 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
2c0262af
FB
7021 break;
7022 case 0xfd: /* std */
b6abf97d 7023 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
317ac620 7024 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
2c0262af
FB
7025 break;
7026
7027 /************************/
7028 /* bit operations */
7029 case 0x1ba: /* bt/bts/btr/btc Gv, im */
14ce26e7 7030 ot = dflag + OT_WORD;
0af10c86 7031 modrm = cpu_ldub_code(env, s->pc++);
33698e5f 7032 op = (modrm >> 3) & 7;
2c0262af 7033 mod = (modrm >> 6) & 3;
14ce26e7 7034 rm = (modrm & 7) | REX_B(s);
2c0262af 7035 if (mod != 3) {
14ce26e7 7036 s->rip_offset = 1;
0af10c86 7037 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 7038 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 7039 } else {
57fec1fe 7040 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af
FB
7041 }
7042 /* load shift */
0af10c86 7043 val = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7044 gen_op_movl_T1_im(val);
7045 if (op < 4)
7046 goto illegal_op;
7047 op -= 4;
f484d386 7048 goto bt_op;
2c0262af
FB
7049 case 0x1a3: /* bt Gv, Ev */
7050 op = 0;
7051 goto do_btx;
7052 case 0x1ab: /* bts */
7053 op = 1;
7054 goto do_btx;
7055 case 0x1b3: /* btr */
7056 op = 2;
7057 goto do_btx;
7058 case 0x1bb: /* btc */
7059 op = 3;
7060 do_btx:
14ce26e7 7061 ot = dflag + OT_WORD;
0af10c86 7062 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7 7063 reg = ((modrm >> 3) & 7) | rex_r;
2c0262af 7064 mod = (modrm >> 6) & 3;
14ce26e7 7065 rm = (modrm & 7) | REX_B(s);
57fec1fe 7066 gen_op_mov_TN_reg(OT_LONG, 1, reg);
2c0262af 7067 if (mod != 3) {
0af10c86 7068 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af 7069 /* special case: we need to add a displacement */
f484d386
FB
7070 gen_exts(ot, cpu_T[1]);
7071 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7072 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7073 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
57fec1fe 7074 gen_op_ld_T0_A0(ot + s->mem_index);
2c0262af 7075 } else {
57fec1fe 7076 gen_op_mov_TN_reg(ot, 0, rm);
2c0262af 7077 }
f484d386
FB
7078 bt_op:
7079 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7080 switch(op) {
7081 case 0:
7082 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7083 tcg_gen_movi_tl(cpu_cc_dst, 0);
7084 break;
7085 case 1:
7086 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7087 tcg_gen_movi_tl(cpu_tmp0, 1);
7088 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7089 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7090 break;
7091 case 2:
7092 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7093 tcg_gen_movi_tl(cpu_tmp0, 1);
7094 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7095 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7096 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7097 break;
7098 default:
7099 case 3:
7100 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7101 tcg_gen_movi_tl(cpu_tmp0, 1);
7102 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7103 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7104 break;
7105 }
3ca51d07 7106 set_cc_op(s, CC_OP_SARB + ot);
2c0262af
FB
7107 if (op != 0) {
7108 if (mod != 3)
57fec1fe 7109 gen_op_st_T0_A0(ot + s->mem_index);
2c0262af 7110 else
57fec1fe 7111 gen_op_mov_reg_T0(ot, rm);
f484d386
FB
7112 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7113 tcg_gen_movi_tl(cpu_cc_dst, 0);
2c0262af
FB
7114 }
7115 break;
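        /* The register forms above reduce to: take the bit offset modulo
           the operand width, copy the selected bit into CF, then set,
           clear or toggle it for bts/btr/btc.  The memory forms first add
           a displacement derived from the signed bit offset.  Illustrative
           sketch (hypothetical helper, not part of the translator):
        */
#if 0
static int do_bit_test(unsigned long *val, unsigned int shift, int op,
                       int width)
{
    unsigned long bit;
    int cf;

    shift &= width - 1;                 /* offset modulo operand size */
    bit = 1ul << shift;
    cf = (*val >> shift) & 1;           /* CF receives the selected bit */
    switch (op) {
    case 1: *val |= bit;  break;        /* bts */
    case 2: *val &= ~bit; break;        /* btr */
    case 3: *val ^= bit;  break;        /* btc */
    }
    return cf;
}
#endif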
321c5351
RH
7116 case 0x1bc: /* bsf / tzcnt */
7117 case 0x1bd: /* bsr / lzcnt */
7118 ot = dflag + OT_WORD;
7119 modrm = cpu_ldub_code(env, s->pc++);
7120 reg = ((modrm >> 3) & 7) | rex_r;
7121 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
7122 gen_extu(ot, cpu_T[0]);
7123
7124 /* Note that lzcnt and tzcnt are in different extensions. */
7125 if ((prefixes & PREFIX_REPZ)
7126 && (b & 1
7127 ? s->cpuid_ext3_features & CPUID_EXT3_ABM
7128 : s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_BMI1)) {
7129 int size = 8 << ot;
7130 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
7131 if (b & 1) {
7132 /* For lzcnt, reduce the target_ulong result by the
7133 number of zeros that we expect to find at the top. */
7134 gen_helper_clz(cpu_T[0], cpu_T[0]);
7135 tcg_gen_subi_tl(cpu_T[0], cpu_T[0], TARGET_LONG_BITS - size);
6191b059 7136 } else {
321c5351
RH
7137 /* For tzcnt, a zero input must return the operand size:
7138 force all bits outside the operand size to 1. */
7139 target_ulong mask = (target_ulong)-2 << (size - 1);
7140 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], mask);
7141 gen_helper_ctz(cpu_T[0], cpu_T[0]);
6191b059 7142 }
321c5351
RH
7143 /* For lzcnt/tzcnt, C and Z bits are defined and are
7144 related to the result. */
7145 gen_op_update1_cc();
7146 set_cc_op(s, CC_OP_BMILGB + ot);
7147 } else {
7148 /* For bsr/bsf, only the Z bit is defined and it is related
7149 to the input and not the result. */
7150 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
7151 set_cc_op(s, CC_OP_LOGICB + ot);
7152 if (b & 1) {
7153 /* For bsr, return the bit index of the first 1 bit,
7154 not the count of leading zeros. */
7155 gen_helper_clz(cpu_T[0], cpu_T[0]);
7156 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], TARGET_LONG_BITS - 1);
7157 } else {
7158 gen_helper_ctz(cpu_T[0], cpu_T[0]);
7159 }
7160 /* ??? The manual says that the output is undefined when the
7161 input is zero, but real hardware leaves it unchanged, and
7162 real programs appear to depend on that. */
7163 tcg_gen_movi_tl(cpu_tmp0, 0);
7164 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_T[0], cpu_cc_dst, cpu_tmp0,
7165 cpu_regs[reg], cpu_T[0]);
6191b059 7166 }
321c5351 7167 gen_op_mov_reg_T0(ot, reg);
2c0262af
FB
7168 break;
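        /* Summary of the semantics implemented above, assuming the input
           was already truncated to the operand size: lzcnt/tzcnt return
           the operand size for a zero input, while bsf/bsr leave the
           destination unchanged (matching observed hardware).  Sketch
           only, using plain loops instead of the clz/ctz helpers:
        */
#if 0
static int lzcnt_sketch(unsigned long long x, int size)
{
    int n = 0;

    while (n < size && !(x & (1ull << (size - 1 - n)))) {
        n++;                            /* count leading zero bits */
    }
    return n;                           /* == size when x == 0 */
}

static int tzcnt_sketch(unsigned long long x, int size)
{
    int n = 0;

    while (n < size && !(x & (1ull << n))) {
        n++;                            /* count trailing zero bits */
    }
    return n;                           /* == size when x == 0 */
}
#endif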
7169 /************************/
7170 /* bcd */
7171 case 0x27: /* daa */
14ce26e7
FB
7172 if (CODE64(s))
7173 goto illegal_op;
773cdfcc 7174 gen_update_cc_op(s);
7923057b 7175 gen_helper_daa(cpu_env);
3ca51d07 7176 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7177 break;
7178 case 0x2f: /* das */
14ce26e7
FB
7179 if (CODE64(s))
7180 goto illegal_op;
773cdfcc 7181 gen_update_cc_op(s);
7923057b 7182 gen_helper_das(cpu_env);
3ca51d07 7183 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7184 break;
7185 case 0x37: /* aaa */
14ce26e7
FB
7186 if (CODE64(s))
7187 goto illegal_op;
773cdfcc 7188 gen_update_cc_op(s);
7923057b 7189 gen_helper_aaa(cpu_env);
3ca51d07 7190 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7191 break;
7192 case 0x3f: /* aas */
14ce26e7
FB
7193 if (CODE64(s))
7194 goto illegal_op;
773cdfcc 7195 gen_update_cc_op(s);
7923057b 7196 gen_helper_aas(cpu_env);
3ca51d07 7197 set_cc_op(s, CC_OP_EFLAGS);
2c0262af
FB
7198 break;
7199 case 0xd4: /* aam */
14ce26e7
FB
7200 if (CODE64(s))
7201 goto illegal_op;
0af10c86 7202 val = cpu_ldub_code(env, s->pc++);
b6d7c3db
TS
7203 if (val == 0) {
7204 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7205 } else {
7923057b 7206 gen_helper_aam(cpu_env, tcg_const_i32(val));
3ca51d07 7207 set_cc_op(s, CC_OP_LOGICB);
b6d7c3db 7208 }
2c0262af
FB
7209 break;
7210 case 0xd5: /* aad */
14ce26e7
FB
7211 if (CODE64(s))
7212 goto illegal_op;
0af10c86 7213 val = cpu_ldub_code(env, s->pc++);
7923057b 7214 gen_helper_aad(cpu_env, tcg_const_i32(val));
3ca51d07 7215 set_cc_op(s, CC_OP_LOGICB);
2c0262af
FB
7216 break;
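        /* The aam/aad helpers invoked above implement the following, with
           'base' being the immediate byte (normally 10) and SF/ZF/PF taken
           from the new AL (hence CC_OP_LOGICB).  Illustrative sketch only:
        */
#if 0
static void aam_sketch(unsigned char *al, unsigned char *ah,
                       unsigned char base)
{
    unsigned char t = *al;              /* base == 0 raises #DE, see above */

    *ah = t / base;
    *al = t % base;
}

static void aad_sketch(unsigned char *al, unsigned char *ah,
                       unsigned char base)
{
    *al = (unsigned char)(*al + *ah * base);
    *ah = 0;
}
#endif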
7217 /************************/
7218 /* misc */
7219 case 0x90: /* nop */
ab1f142b 7220 /* XXX: correct lock test for all insn */
7418027e 7221 if (prefixes & PREFIX_LOCK) {
ab1f142b 7222 goto illegal_op;
7418027e
RH
7223 }
7224 /* If REX_B is set, then this is xchg eax, r8d, not a nop. */
7225 if (REX_B(s)) {
7226 goto do_xchg_reg_eax;
7227 }
0573fbfc 7228 if (prefixes & PREFIX_REPZ) {
81f3053b
PB
7229 gen_update_cc_op(s);
7230 gen_jmp_im(pc_start - s->cs_base);
7231 gen_helper_pause(cpu_env, tcg_const_i32(s->pc - pc_start));
7232 s->is_jmp = DISAS_TB_JUMP;
0573fbfc 7233 }
2c0262af
FB
7234 break;
7235 case 0x9b: /* fwait */
5fafdf24 7236 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7eee2a50
FB
7237 (HF_MP_MASK | HF_TS_MASK)) {
7238 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2ee73ac3 7239 } else {
773cdfcc 7240 gen_update_cc_op(s);
14ce26e7 7241 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 7242 gen_helper_fwait(cpu_env);
7eee2a50 7243 }
2c0262af
FB
7244 break;
7245 case 0xcc: /* int3 */
7246 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7247 break;
7248 case 0xcd: /* int N */
0af10c86 7249 val = cpu_ldub_code(env, s->pc++);
f115e911 7250 if (s->vm86 && s->iopl != 3) {
5fafdf24 7251 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
f115e911
FB
7252 } else {
7253 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7254 }
2c0262af
FB
7255 break;
7256 case 0xce: /* into */
14ce26e7
FB
7257 if (CODE64(s))
7258 goto illegal_op;
773cdfcc 7259 gen_update_cc_op(s);
a8ede8ba 7260 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7261 gen_helper_into(cpu_env, tcg_const_i32(s->pc - pc_start));
2c0262af 7262 break;
0b97134b 7263#ifdef WANT_ICEBP
2c0262af 7264 case 0xf1: /* icebp (undocumented, exits to external debugger) */
872929aa 7265 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
aba9d61e 7266#if 1
2c0262af 7267 gen_debug(s, pc_start - s->cs_base);
aba9d61e
FB
7268#else
7269 /* start debug */
0af10c86 7270 tb_flush(env);
24537a01 7271 qemu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
aba9d61e 7272#endif
2c0262af 7273 break;
0b97134b 7274#endif
2c0262af
FB
7275 case 0xfa: /* cli */
7276 if (!s->vm86) {
7277 if (s->cpl <= s->iopl) {
f0967a1a 7278 gen_helper_cli(cpu_env);
2c0262af
FB
7279 } else {
7280 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7281 }
7282 } else {
7283 if (s->iopl == 3) {
f0967a1a 7284 gen_helper_cli(cpu_env);
2c0262af
FB
7285 } else {
7286 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7287 }
7288 }
7289 break;
7290 case 0xfb: /* sti */
7291 if (!s->vm86) {
7292 if (s->cpl <= s->iopl) {
7293 gen_sti:
f0967a1a 7294 gen_helper_sti(cpu_env);
2c0262af 7295 /* interrupts are not recognized until after the insn following sti */
a2cc3b24
FB
 7296 /* if several consecutive insns would inhibit interrupts, only the
 7297 _first_ one sets the inhibit flag */
7298 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
f0967a1a 7299 gen_helper_set_inhibit_irq(cpu_env);
2c0262af 7300 /* give a chance to handle pending irqs */
14ce26e7 7301 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
7302 gen_eob(s);
7303 } else {
7304 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7305 }
7306 } else {
7307 if (s->iopl == 3) {
7308 goto gen_sti;
7309 } else {
7310 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7311 }
7312 }
7313 break;
7314 case 0x62: /* bound */
14ce26e7
FB
7315 if (CODE64(s))
7316 goto illegal_op;
2c0262af 7317 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 7318 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7319 reg = (modrm >> 3) & 7;
7320 mod = (modrm >> 6) & 3;
7321 if (mod == 3)
7322 goto illegal_op;
57fec1fe 7323 gen_op_mov_TN_reg(ot, 0, reg);
0af10c86 7324 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 7325 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7326 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
92fc4b58
BS
7327 if (ot == OT_WORD) {
7328 gen_helper_boundw(cpu_env, cpu_A0, cpu_tmp2_i32);
7329 } else {
7330 gen_helper_boundl(cpu_env, cpu_A0, cpu_tmp2_i32);
7331 }
2c0262af
FB
7332 break;
7333 case 0x1c8 ... 0x1cf: /* bswap reg */
14ce26e7
FB
7334 reg = (b & 7) | REX_B(s);
7335#ifdef TARGET_X86_64
7336 if (dflag == 2) {
57fec1fe 7337 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
66896cb8 7338 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
57fec1fe 7339 gen_op_mov_reg_T0(OT_QUAD, reg);
5fafdf24 7340 } else
8777643e 7341#endif
57fec1fe
FB
7342 {
7343 gen_op_mov_TN_reg(OT_LONG, 0, reg);
8777643e
AJ
7344 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
7345 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
57fec1fe 7346 gen_op_mov_reg_T0(OT_LONG, reg);
14ce26e7 7347 }
2c0262af
FB
7348 break;
7349 case 0xd6: /* salc */
14ce26e7
FB
7350 if (CODE64(s))
7351 goto illegal_op;
cc8b6f5b 7352 gen_compute_eflags_c(s, cpu_T[0]);
bd7a7b33
FB
7353 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7354 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
2c0262af
FB
7355 break;
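        /* salc above materialises CF into AL by negating the carry:
           AL = CF ? 0xff : 0x00.  Sketch:
        */
#if 0
static unsigned char salc_sketch(int cf)
{
    return (unsigned char)-(cf & 1);    /* 0x00 if CF clear, 0xff if set */
}
#endif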
7356 case 0xe0: /* loopnz */
7357 case 0xe1: /* loopz */
2c0262af
FB
7358 case 0xe2: /* loop */
7359 case 0xe3: /* jecxz */
14ce26e7 7360 {
6e0d8677 7361 int l1, l2, l3;
14ce26e7 7362
0af10c86 7363 tval = (int8_t)insn_get(env, s, OT_BYTE);
14ce26e7
FB
7364 next_eip = s->pc - s->cs_base;
7365 tval += next_eip;
7366 if (s->dflag == 0)
7367 tval &= 0xffff;
3b46e624 7368
14ce26e7
FB
7369 l1 = gen_new_label();
7370 l2 = gen_new_label();
6e0d8677 7371 l3 = gen_new_label();
14ce26e7 7372 b &= 3;
6e0d8677
FB
7373 switch(b) {
7374 case 0: /* loopnz */
7375 case 1: /* loopz */
6e0d8677
FB
7376 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7377 gen_op_jz_ecx(s->aflag, l3);
5bdb91b0 7378 gen_jcc1(s, (JCC_Z << 1) | (b ^ 1), l1);
6e0d8677
FB
7379 break;
7380 case 2: /* loop */
7381 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7382 gen_op_jnz_ecx(s->aflag, l1);
7383 break;
7384 default:
7385 case 3: /* jcxz */
7386 gen_op_jz_ecx(s->aflag, l1);
7387 break;
14ce26e7
FB
7388 }
7389
6e0d8677 7390 gen_set_label(l3);
14ce26e7 7391 gen_jmp_im(next_eip);
8e1c85e3 7392 tcg_gen_br(l2);
6e0d8677 7393
14ce26e7
FB
7394 gen_set_label(l1);
7395 gen_jmp_im(tval);
7396 gen_set_label(l2);
7397 gen_eob(s);
7398 }
2c0262af
FB
7399 break;
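        /* Branch conditions generated above; the counter is CX/ECX/RCX
           selected by the address size, and only the loop forms decrement
           it.  Illustrative sketch ('b' is the low two opcode bits, as in
           the switch above):
        */
#if 0
static int loop_taken_sketch(int b, unsigned long long *counter, int zf)
{
    switch (b & 3) {
    case 0: return --*counter != 0 && !zf;  /* loopnz */
    case 1: return --*counter != 0 && zf;   /* loopz  */
    case 2: return --*counter != 0;         /* loop   */
    default: return *counter == 0;          /* jcxz: no decrement */
    }
}
#endif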
7400 case 0x130: /* wrmsr */
7401 case 0x132: /* rdmsr */
7402 if (s->cpl != 0) {
7403 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7404 } else {
773cdfcc 7405 gen_update_cc_op(s);
872929aa 7406 gen_jmp_im(pc_start - s->cs_base);
0573fbfc 7407 if (b & 2) {
4a7443be 7408 gen_helper_rdmsr(cpu_env);
0573fbfc 7409 } else {
4a7443be 7410 gen_helper_wrmsr(cpu_env);
0573fbfc 7411 }
2c0262af
FB
7412 }
7413 break;
7414 case 0x131: /* rdtsc */
773cdfcc 7415 gen_update_cc_op(s);
ecada8a2 7416 gen_jmp_im(pc_start - s->cs_base);
efade670
PB
7417 if (use_icount)
7418 gen_io_start();
4a7443be 7419 gen_helper_rdtsc(cpu_env);
efade670
PB
7420 if (use_icount) {
7421 gen_io_end();
7422 gen_jmp(s, s->pc - s->cs_base);
7423 }
2c0262af 7424 break;
df01e0fc 7425 case 0x133: /* rdpmc */
773cdfcc 7426 gen_update_cc_op(s);
df01e0fc 7427 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7428 gen_helper_rdpmc(cpu_env);
df01e0fc 7429 break;
023fe10d 7430 case 0x134: /* sysenter */
2436b61a 7431 /* For Intel CPUs, SYSENTER is valid in 64-bit mode */
0af10c86 7432 if (CODE64(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
14ce26e7 7433 goto illegal_op;
023fe10d
FB
7434 if (!s->pe) {
7435 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7436 } else {
728d803b 7437 gen_update_cc_op(s);
14ce26e7 7438 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7439 gen_helper_sysenter(cpu_env);
023fe10d
FB
7440 gen_eob(s);
7441 }
7442 break;
7443 case 0x135: /* sysexit */
2436b61a 7444 /* For Intel CPUs, SYSEXIT is valid in 64-bit mode */
0af10c86 7445 if (CODE64(s) && env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
14ce26e7 7446 goto illegal_op;
023fe10d
FB
7447 if (!s->pe) {
7448 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7449 } else {
728d803b 7450 gen_update_cc_op(s);
14ce26e7 7451 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7452 gen_helper_sysexit(cpu_env, tcg_const_i32(dflag));
023fe10d
FB
7453 gen_eob(s);
7454 }
7455 break;
14ce26e7
FB
7456#ifdef TARGET_X86_64
7457 case 0x105: /* syscall */
7458 /* XXX: is it usable in real mode ? */
728d803b 7459 gen_update_cc_op(s);
14ce26e7 7460 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7461 gen_helper_syscall(cpu_env, tcg_const_i32(s->pc - pc_start));
14ce26e7
FB
7462 gen_eob(s);
7463 break;
7464 case 0x107: /* sysret */
7465 if (!s->pe) {
7466 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7467 } else {
728d803b 7468 gen_update_cc_op(s);
14ce26e7 7469 gen_jmp_im(pc_start - s->cs_base);
2999a0b2 7470 gen_helper_sysret(cpu_env, tcg_const_i32(s->dflag));
aba9d61e 7471 /* condition codes are modified only in long mode */
3ca51d07
RH
7472 if (s->lma) {
7473 set_cc_op(s, CC_OP_EFLAGS);
7474 }
14ce26e7
FB
7475 gen_eob(s);
7476 }
7477 break;
7478#endif
2c0262af 7479 case 0x1a2: /* cpuid */
773cdfcc 7480 gen_update_cc_op(s);
9575cb94 7481 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7482 gen_helper_cpuid(cpu_env);
2c0262af
FB
7483 break;
7484 case 0xf4: /* hlt */
7485 if (s->cpl != 0) {
7486 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7487 } else {
773cdfcc 7488 gen_update_cc_op(s);
94451178 7489 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7490 gen_helper_hlt(cpu_env, tcg_const_i32(s->pc - pc_start));
5779406a 7491 s->is_jmp = DISAS_TB_JUMP;
2c0262af
FB
7492 }
7493 break;
7494 case 0x100:
0af10c86 7495 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7496 mod = (modrm >> 6) & 3;
7497 op = (modrm >> 3) & 7;
7498 switch(op) {
7499 case 0: /* sldt */
f115e911
FB
7500 if (!s->pe || s->vm86)
7501 goto illegal_op;
872929aa 7502 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
651ba608 7503 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
2c0262af
FB
7504 ot = OT_WORD;
7505 if (mod == 3)
7506 ot += s->dflag;
0af10c86 7507 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
7508 break;
7509 case 2: /* lldt */
f115e911
FB
7510 if (!s->pe || s->vm86)
7511 goto illegal_op;
2c0262af
FB
7512 if (s->cpl != 0) {
7513 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7514 } else {
872929aa 7515 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
0af10c86 7516 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
14ce26e7 7517 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7518 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 7519 gen_helper_lldt(cpu_env, cpu_tmp2_i32);
2c0262af
FB
7520 }
7521 break;
7522 case 1: /* str */
f115e911
FB
7523 if (!s->pe || s->vm86)
7524 goto illegal_op;
872929aa 7525 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
651ba608 7526 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
2c0262af
FB
7527 ot = OT_WORD;
7528 if (mod == 3)
7529 ot += s->dflag;
0af10c86 7530 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 1);
2c0262af
FB
7531 break;
7532 case 3: /* ltr */
f115e911
FB
7533 if (!s->pe || s->vm86)
7534 goto illegal_op;
2c0262af
FB
7535 if (s->cpl != 0) {
7536 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7537 } else {
872929aa 7538 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
0af10c86 7539 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
14ce26e7 7540 gen_jmp_im(pc_start - s->cs_base);
b6abf97d 7541 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2999a0b2 7542 gen_helper_ltr(cpu_env, cpu_tmp2_i32);
2c0262af
FB
7543 }
7544 break;
7545 case 4: /* verr */
7546 case 5: /* verw */
f115e911
FB
7547 if (!s->pe || s->vm86)
7548 goto illegal_op;
0af10c86 7549 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
773cdfcc 7550 gen_update_cc_op(s);
2999a0b2
BS
7551 if (op == 4) {
7552 gen_helper_verr(cpu_env, cpu_T[0]);
7553 } else {
7554 gen_helper_verw(cpu_env, cpu_T[0]);
7555 }
3ca51d07 7556 set_cc_op(s, CC_OP_EFLAGS);
f115e911 7557 break;
2c0262af
FB
7558 default:
7559 goto illegal_op;
7560 }
7561 break;
7562 case 0x101:
0af10c86 7563 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7564 mod = (modrm >> 6) & 3;
7565 op = (modrm >> 3) & 7;
3d7374c5 7566 rm = modrm & 7;
2c0262af
FB
7567 switch(op) {
7568 case 0: /* sgdt */
2c0262af
FB
7569 if (mod == 3)
7570 goto illegal_op;
872929aa 7571 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
0af10c86 7572 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 7573 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
57fec1fe 7574 gen_op_st_T0_A0(OT_WORD + s->mem_index);
aba9d61e 7575 gen_add_A0_im(s, 2);
651ba608 7576 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
2c0262af
FB
7577 if (!s->dflag)
7578 gen_op_andl_T0_im(0xffffff);
57fec1fe 7579 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
2c0262af 7580 break;
3d7374c5
FB
7581 case 1:
7582 if (mod == 3) {
7583 switch (rm) {
7584 case 0: /* monitor */
7585 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7586 s->cpl != 0)
7587 goto illegal_op;
773cdfcc 7588 gen_update_cc_op(s);
3d7374c5
FB
7589 gen_jmp_im(pc_start - s->cs_base);
7590#ifdef TARGET_X86_64
7591 if (s->aflag == 2) {
bbf662ee 7592 gen_op_movq_A0_reg(R_EAX);
5fafdf24 7593 } else
3d7374c5
FB
7594#endif
7595 {
bbf662ee 7596 gen_op_movl_A0_reg(R_EAX);
3d7374c5
FB
7597 if (s->aflag == 0)
7598 gen_op_andl_A0_ffff();
7599 }
7600 gen_add_A0_ds_seg(s);
4a7443be 7601 gen_helper_monitor(cpu_env, cpu_A0);
3d7374c5
FB
7602 break;
7603 case 1: /* mwait */
7604 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7605 s->cpl != 0)
7606 goto illegal_op;
728d803b 7607 gen_update_cc_op(s);
94451178 7608 gen_jmp_im(pc_start - s->cs_base);
4a7443be 7609 gen_helper_mwait(cpu_env, tcg_const_i32(s->pc - pc_start));
3d7374c5
FB
7610 gen_eob(s);
7611 break;
a9321a4d
PA
7612 case 2: /* clac */
7613 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) ||
7614 s->cpl != 0) {
7615 goto illegal_op;
7616 }
7617 gen_helper_clac(cpu_env);
7618 gen_jmp_im(s->pc - s->cs_base);
7619 gen_eob(s);
7620 break;
7621 case 3: /* stac */
7622 if (!(s->cpuid_7_0_ebx_features & CPUID_7_0_EBX_SMAP) ||
7623 s->cpl != 0) {
7624 goto illegal_op;
7625 }
7626 gen_helper_stac(cpu_env);
7627 gen_jmp_im(s->pc - s->cs_base);
7628 gen_eob(s);
7629 break;
3d7374c5
FB
7630 default:
7631 goto illegal_op;
7632 }
7633 } else { /* sidt */
872929aa 7634 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
0af10c86 7635 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
651ba608 7636 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
57fec1fe 7637 gen_op_st_T0_A0(OT_WORD + s->mem_index);
3d7374c5 7638 gen_add_A0_im(s, 2);
651ba608 7639 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
3d7374c5
FB
7640 if (!s->dflag)
7641 gen_op_andl_T0_im(0xffffff);
57fec1fe 7642 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
3d7374c5
FB
7643 }
7644 break;
2c0262af
FB
7645 case 2: /* lgdt */
7646 case 3: /* lidt */
0573fbfc 7647 if (mod == 3) {
773cdfcc 7648 gen_update_cc_op(s);
872929aa 7649 gen_jmp_im(pc_start - s->cs_base);
0573fbfc
TS
7650 switch(rm) {
7651 case 0: /* VMRUN */
872929aa
FB
7652 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7653 goto illegal_op;
7654 if (s->cpl != 0) {
7655 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
0573fbfc 7656 break;
872929aa 7657 } else {
052e80d5 7658 gen_helper_vmrun(cpu_env, tcg_const_i32(s->aflag),
a7812ae4 7659 tcg_const_i32(s->pc - pc_start));
db620f46 7660 tcg_gen_exit_tb(0);
5779406a 7661 s->is_jmp = DISAS_TB_JUMP;
872929aa 7662 }
0573fbfc
TS
7663 break;
7664 case 1: /* VMMCALL */
872929aa
FB
7665 if (!(s->flags & HF_SVME_MASK))
7666 goto illegal_op;
052e80d5 7667 gen_helper_vmmcall(cpu_env);
0573fbfc
TS
7668 break;
7669 case 2: /* VMLOAD */
872929aa
FB
7670 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7671 goto illegal_op;
7672 if (s->cpl != 0) {
7673 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7674 break;
7675 } else {
052e80d5 7676 gen_helper_vmload(cpu_env, tcg_const_i32(s->aflag));
872929aa 7677 }
0573fbfc
TS
7678 break;
7679 case 3: /* VMSAVE */
872929aa
FB
7680 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7681 goto illegal_op;
7682 if (s->cpl != 0) {
7683 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7684 break;
7685 } else {
052e80d5 7686 gen_helper_vmsave(cpu_env, tcg_const_i32(s->aflag));
872929aa 7687 }
0573fbfc
TS
7688 break;
7689 case 4: /* STGI */
872929aa
FB
7690 if ((!(s->flags & HF_SVME_MASK) &&
7691 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7692 !s->pe)
7693 goto illegal_op;
7694 if (s->cpl != 0) {
7695 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7696 break;
7697 } else {
052e80d5 7698 gen_helper_stgi(cpu_env);
872929aa 7699 }
0573fbfc
TS
7700 break;
7701 case 5: /* CLGI */
872929aa
FB
7702 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7703 goto illegal_op;
7704 if (s->cpl != 0) {
7705 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7706 break;
7707 } else {
052e80d5 7708 gen_helper_clgi(cpu_env);
872929aa 7709 }
0573fbfc
TS
7710 break;
7711 case 6: /* SKINIT */
872929aa
FB
7712 if ((!(s->flags & HF_SVME_MASK) &&
7713 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7714 !s->pe)
7715 goto illegal_op;
052e80d5 7716 gen_helper_skinit(cpu_env);
0573fbfc
TS
7717 break;
7718 case 7: /* INVLPGA */
872929aa
FB
7719 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7720 goto illegal_op;
7721 if (s->cpl != 0) {
7722 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7723 break;
7724 } else {
052e80d5 7725 gen_helper_invlpga(cpu_env, tcg_const_i32(s->aflag));
872929aa 7726 }
0573fbfc
TS
7727 break;
7728 default:
7729 goto illegal_op;
7730 }
7731 } else if (s->cpl != 0) {
2c0262af
FB
7732 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7733 } else {
872929aa
FB
7734 gen_svm_check_intercept(s, pc_start,
7735 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
0af10c86 7736 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
57fec1fe 7737 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
aba9d61e 7738 gen_add_A0_im(s, 2);
57fec1fe 7739 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
2c0262af
FB
7740 if (!s->dflag)
7741 gen_op_andl_T0_im(0xffffff);
7742 if (op == 2) {
651ba608
FB
7743 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7744 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
2c0262af 7745 } else {
651ba608
FB
7746 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7747 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
2c0262af
FB
7748 }
7749 }
7750 break;
7751 case 4: /* smsw */
872929aa 7752 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
e2542fe2 7753#if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
f60d2728 7754 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7755#else
651ba608 7756 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
f60d2728 7757#endif
0af10c86 7758 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 1);
2c0262af
FB
7759 break;
7760 case 6: /* lmsw */
7761 if (s->cpl != 0) {
7762 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7763 } else {
872929aa 7764 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
0af10c86 7765 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
4a7443be 7766 gen_helper_lmsw(cpu_env, cpu_T[0]);
14ce26e7 7767 gen_jmp_im(s->pc - s->cs_base);
d71b9a8b 7768 gen_eob(s);
2c0262af
FB
7769 }
7770 break;
1b050077
AP
7771 case 7:
7772 if (mod != 3) { /* invlpg */
7773 if (s->cpl != 0) {
7774 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7775 } else {
773cdfcc 7776 gen_update_cc_op(s);
1b050077 7777 gen_jmp_im(pc_start - s->cs_base);
0af10c86 7778 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
4a7443be 7779 gen_helper_invlpg(cpu_env, cpu_A0);
1b050077
AP
7780 gen_jmp_im(s->pc - s->cs_base);
7781 gen_eob(s);
7782 }
2c0262af 7783 } else {
1b050077
AP
7784 switch (rm) {
7785 case 0: /* swapgs */
14ce26e7 7786#ifdef TARGET_X86_64
1b050077
AP
7787 if (CODE64(s)) {
7788 if (s->cpl != 0) {
7789 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7790 } else {
7791 tcg_gen_ld_tl(cpu_T[0], cpu_env,
7792 offsetof(CPUX86State,segs[R_GS].base));
7793 tcg_gen_ld_tl(cpu_T[1], cpu_env,
7794 offsetof(CPUX86State,kernelgsbase));
7795 tcg_gen_st_tl(cpu_T[1], cpu_env,
7796 offsetof(CPUX86State,segs[R_GS].base));
7797 tcg_gen_st_tl(cpu_T[0], cpu_env,
7798 offsetof(CPUX86State,kernelgsbase));
7799 }
5fafdf24 7800 } else
14ce26e7
FB
7801#endif
7802 {
7803 goto illegal_op;
7804 }
1b050077
AP
7805 break;
7806 case 1: /* rdtscp */
7807 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7808 goto illegal_op;
773cdfcc 7809 gen_update_cc_op(s);
9575cb94 7810 gen_jmp_im(pc_start - s->cs_base);
1b050077
AP
7811 if (use_icount)
7812 gen_io_start();
4a7443be 7813 gen_helper_rdtscp(cpu_env);
1b050077
AP
7814 if (use_icount) {
7815 gen_io_end();
7816 gen_jmp(s, s->pc - s->cs_base);
7817 }
7818 break;
7819 default:
7820 goto illegal_op;
14ce26e7 7821 }
2c0262af
FB
7822 }
7823 break;
7824 default:
7825 goto illegal_op;
7826 }
7827 break;
3415a4dd
FB
7828 case 0x108: /* invd */
7829 case 0x109: /* wbinvd */
7830 if (s->cpl != 0) {
7831 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7832 } else {
872929aa 7833 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
3415a4dd
FB
7834 /* nothing to do */
7835 }
7836 break;
14ce26e7
FB
7837 case 0x63: /* arpl or movslS (x86_64) */
7838#ifdef TARGET_X86_64
7839 if (CODE64(s)) {
7840 int d_ot;
 7841 /* d_ot is the size of the destination */
7842 d_ot = dflag + OT_WORD;
7843
0af10c86 7844 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
7845 reg = ((modrm >> 3) & 7) | rex_r;
7846 mod = (modrm >> 6) & 3;
7847 rm = (modrm & 7) | REX_B(s);
3b46e624 7848
14ce26e7 7849 if (mod == 3) {
57fec1fe 7850 gen_op_mov_TN_reg(OT_LONG, 0, rm);
14ce26e7
FB
7851 /* sign extend */
7852 if (d_ot == OT_QUAD)
e108dd01 7853 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
57fec1fe 7854 gen_op_mov_reg_T0(d_ot, reg);
14ce26e7 7855 } else {
0af10c86 7856 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
14ce26e7 7857 if (d_ot == OT_QUAD) {
57fec1fe 7858 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
14ce26e7 7859 } else {
57fec1fe 7860 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
14ce26e7 7861 }
57fec1fe 7862 gen_op_mov_reg_T0(d_ot, reg);
14ce26e7 7863 }
5fafdf24 7864 } else
14ce26e7
FB
7865#endif
7866 {
3bd7da9e 7867 int label1;
49d9fdcc 7868 TCGv t0, t1, t2, a0;
1e4840bf 7869
14ce26e7
FB
7870 if (!s->pe || s->vm86)
7871 goto illegal_op;
a7812ae4
PB
7872 t0 = tcg_temp_local_new();
7873 t1 = tcg_temp_local_new();
7874 t2 = tcg_temp_local_new();
3bd7da9e 7875 ot = OT_WORD;
0af10c86 7876 modrm = cpu_ldub_code(env, s->pc++);
14ce26e7
FB
7877 reg = (modrm >> 3) & 7;
7878 mod = (modrm >> 6) & 3;
7879 rm = modrm & 7;
7880 if (mod != 3) {
0af10c86 7881 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
1e4840bf 7882 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
49d9fdcc
LD
7883 a0 = tcg_temp_local_new();
7884 tcg_gen_mov_tl(a0, cpu_A0);
14ce26e7 7885 } else {
1e4840bf 7886 gen_op_mov_v_reg(ot, t0, rm);
49d9fdcc 7887 TCGV_UNUSED(a0);
14ce26e7 7888 }
1e4840bf
FB
7889 gen_op_mov_v_reg(ot, t1, reg);
7890 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7891 tcg_gen_andi_tl(t1, t1, 3);
7892 tcg_gen_movi_tl(t2, 0);
3bd7da9e 7893 label1 = gen_new_label();
1e4840bf
FB
7894 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7895 tcg_gen_andi_tl(t0, t0, ~3);
7896 tcg_gen_or_tl(t0, t0, t1);
7897 tcg_gen_movi_tl(t2, CC_Z);
3bd7da9e 7898 gen_set_label(label1);
14ce26e7 7899 if (mod != 3) {
49d9fdcc
LD
7900 gen_op_st_v(ot + s->mem_index, t0, a0);
7901 tcg_temp_free(a0);
7902 } else {
1e4840bf 7903 gen_op_mov_reg_v(ot, rm, t0);
14ce26e7 7904 }
d229edce 7905 gen_compute_eflags(s);
3bd7da9e 7906 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
1e4840bf 7907 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
1e4840bf
FB
7908 tcg_temp_free(t0);
7909 tcg_temp_free(t1);
7910 tcg_temp_free(t2);
f115e911 7911 }
f115e911 7912 break;
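        /* The t0/t1/t2 sequence above implements ARPL: compare the RPL
           field (low two bits) of the destination selector against the
           source and raise it if lower, setting ZF accordingly.  Sketch
           (hypothetical helper, not part of the translator):
        */
#if 0
static int arpl_sketch(unsigned short *dest, unsigned short src)
{
    if ((*dest & 3) < (src & 3)) {
        *dest = (*dest & ~3) | (src & 3);   /* raise RPL to the source's */
        return 1;                           /* ZF set */
    }
    return 0;                               /* ZF clear, dest unchanged */
}
#endif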
2c0262af
FB
7913 case 0x102: /* lar */
7914 case 0x103: /* lsl */
cec6843e
FB
7915 {
7916 int label1;
1e4840bf 7917 TCGv t0;
cec6843e
FB
7918 if (!s->pe || s->vm86)
7919 goto illegal_op;
7920 ot = dflag ? OT_LONG : OT_WORD;
0af10c86 7921 modrm = cpu_ldub_code(env, s->pc++);
cec6843e 7922 reg = ((modrm >> 3) & 7) | rex_r;
0af10c86 7923 gen_ldst_modrm(env, s, modrm, OT_WORD, OR_TMP0, 0);
a7812ae4 7924 t0 = tcg_temp_local_new();
773cdfcc 7925 gen_update_cc_op(s);
2999a0b2
BS
7926 if (b == 0x102) {
7927 gen_helper_lar(t0, cpu_env, cpu_T[0]);
7928 } else {
7929 gen_helper_lsl(t0, cpu_env, cpu_T[0]);
7930 }
cec6843e
FB
7931 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7932 label1 = gen_new_label();
cb63669a 7933 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1e4840bf 7934 gen_op_mov_reg_v(ot, reg, t0);
cec6843e 7935 gen_set_label(label1);
3ca51d07 7936 set_cc_op(s, CC_OP_EFLAGS);
1e4840bf 7937 tcg_temp_free(t0);
cec6843e 7938 }
2c0262af
FB
7939 break;
7940 case 0x118:
0af10c86 7941 modrm = cpu_ldub_code(env, s->pc++);
2c0262af
FB
7942 mod = (modrm >> 6) & 3;
7943 op = (modrm >> 3) & 7;
7944 switch(op) {
7945 case 0: /* prefetchnta */
 7946 case 1: /* prefetcht0 */
 7947 case 2: /* prefetcht1 */
 7948 case 3: /* prefetcht2 */
7949 if (mod == 3)
7950 goto illegal_op;
0af10c86 7951 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
2c0262af
FB
7952 /* nothing more to do */
7953 break;
e17a36ce 7954 default: /* nop (multi byte) */
0af10c86 7955 gen_nop_modrm(env, s, modrm);
e17a36ce 7956 break;
2c0262af
FB
7957 }
7958 break;
e17a36ce 7959 case 0x119 ... 0x11f: /* nop (multi byte) */
0af10c86
BS
7960 modrm = cpu_ldub_code(env, s->pc++);
7961 gen_nop_modrm(env, s, modrm);
e17a36ce 7962 break;
2c0262af
FB
7963 case 0x120: /* mov reg, crN */
7964 case 0x122: /* mov crN, reg */
7965 if (s->cpl != 0) {
7966 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7967 } else {
0af10c86 7968 modrm = cpu_ldub_code(env, s->pc++);
5c73b757
MO
7969 /* Ignore the mod bits (assume (modrm&0xc0)==0xc0).
7970 * AMD documentation (24594.pdf) and testing of
7971 * intel 386 and 486 processors all show that the mod bits
7972 * are assumed to be 1's, regardless of actual values.
7973 */
14ce26e7
FB
7974 rm = (modrm & 7) | REX_B(s);
7975 reg = ((modrm >> 3) & 7) | rex_r;
7976 if (CODE64(s))
7977 ot = OT_QUAD;
7978 else
7979 ot = OT_LONG;
ccd59d09
AP
7980 if ((prefixes & PREFIX_LOCK) && (reg == 0) &&
7981 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) {
7982 reg = 8;
7983 }
2c0262af
FB
7984 switch(reg) {
7985 case 0:
7986 case 2:
7987 case 3:
7988 case 4:
9230e66e 7989 case 8:
773cdfcc 7990 gen_update_cc_op(s);
872929aa 7991 gen_jmp_im(pc_start - s->cs_base);
2c0262af 7992 if (b & 2) {
57fec1fe 7993 gen_op_mov_TN_reg(ot, 0, rm);
4a7443be
BS
7994 gen_helper_write_crN(cpu_env, tcg_const_i32(reg),
7995 cpu_T[0]);
14ce26e7 7996 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
7997 gen_eob(s);
7998 } else {
4a7443be 7999 gen_helper_read_crN(cpu_T[0], cpu_env, tcg_const_i32(reg));
57fec1fe 8000 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
8001 }
8002 break;
8003 default:
8004 goto illegal_op;
8005 }
8006 }
8007 break;
8008 case 0x121: /* mov reg, drN */
8009 case 0x123: /* mov drN, reg */
8010 if (s->cpl != 0) {
8011 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8012 } else {
0af10c86 8013 modrm = cpu_ldub_code(env, s->pc++);
5c73b757
MO
8014 /* Ignore the mod bits (assume (modrm&0xc0)==0xc0).
8015 * AMD documentation (24594.pdf) and testing of
8016 * intel 386 and 486 processors all show that the mod bits
8017 * are assumed to be 1's, regardless of actual values.
8018 */
14ce26e7
FB
8019 rm = (modrm & 7) | REX_B(s);
8020 reg = ((modrm >> 3) & 7) | rex_r;
8021 if (CODE64(s))
8022 ot = OT_QUAD;
8023 else
8024 ot = OT_LONG;
2c0262af 8025 /* XXX: do it dynamically with CR4.DE bit */
14ce26e7 8026 if (reg == 4 || reg == 5 || reg >= 8)
2c0262af
FB
8027 goto illegal_op;
8028 if (b & 2) {
0573fbfc 8029 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
57fec1fe 8030 gen_op_mov_TN_reg(ot, 0, rm);
4a7443be 8031 gen_helper_movl_drN_T0(cpu_env, tcg_const_i32(reg), cpu_T[0]);
14ce26e7 8032 gen_jmp_im(s->pc - s->cs_base);
2c0262af
FB
8033 gen_eob(s);
8034 } else {
0573fbfc 8035 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
651ba608 8036 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
57fec1fe 8037 gen_op_mov_reg_T0(ot, rm);
2c0262af
FB
8038 }
8039 }
8040 break;
8041 case 0x106: /* clts */
8042 if (s->cpl != 0) {
8043 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8044 } else {
0573fbfc 8045 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
f0967a1a 8046 gen_helper_clts(cpu_env);
7eee2a50 8047 /* abort block because static cpu state changed */
14ce26e7 8048 gen_jmp_im(s->pc - s->cs_base);
7eee2a50 8049 gen_eob(s);
2c0262af
FB
8050 }
8051 break;
222a3336 8052 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
664e0f19
FB
8053 case 0x1c3: /* MOVNTI reg, mem */
8054 if (!(s->cpuid_features & CPUID_SSE2))
14ce26e7 8055 goto illegal_op;
664e0f19 8056 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
0af10c86 8057 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
8058 mod = (modrm >> 6) & 3;
8059 if (mod == 3)
8060 goto illegal_op;
8061 reg = ((modrm >> 3) & 7) | rex_r;
8062 /* generate a generic store */
0af10c86 8063 gen_ldst_modrm(env, s, modrm, ot, reg, 1);
14ce26e7 8064 break;
664e0f19 8065 case 0x1ae:
0af10c86 8066 modrm = cpu_ldub_code(env, s->pc++);
664e0f19
FB
8067 mod = (modrm >> 6) & 3;
8068 op = (modrm >> 3) & 7;
8069 switch(op) {
8070 case 0: /* fxsave */
5fafdf24 8071 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
09d85fb8 8072 (s->prefix & PREFIX_LOCK))
14ce26e7 8073 goto illegal_op;
09d85fb8 8074 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
0fd14b72
FB
8075 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8076 break;
8077 }
0af10c86 8078 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
773cdfcc 8079 gen_update_cc_op(s);
19e6c4b8 8080 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae 8081 gen_helper_fxsave(cpu_env, cpu_A0, tcg_const_i32((s->dflag == 2)));
664e0f19
FB
8082 break;
8083 case 1: /* fxrstor */
5fafdf24 8084 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
09d85fb8 8085 (s->prefix & PREFIX_LOCK))
14ce26e7 8086 goto illegal_op;
09d85fb8 8087 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
0fd14b72
FB
8088 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8089 break;
8090 }
0af10c86 8091 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
773cdfcc 8092 gen_update_cc_op(s);
19e6c4b8 8093 gen_jmp_im(pc_start - s->cs_base);
d3eb5eae
BS
8094 gen_helper_fxrstor(cpu_env, cpu_A0,
8095 tcg_const_i32((s->dflag == 2)));
664e0f19
FB
8096 break;
8097 case 2: /* ldmxcsr */
8098 case 3: /* stmxcsr */
8099 if (s->flags & HF_TS_MASK) {
8100 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8101 break;
14ce26e7 8102 }
664e0f19
FB
8103 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8104 mod == 3)
14ce26e7 8105 goto illegal_op;
0af10c86 8106 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
664e0f19 8107 if (op == 2) {
57fec1fe 8108 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
20f8bd48 8109 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
d3eb5eae 8110 gen_helper_ldmxcsr(cpu_env, cpu_tmp2_i32);
14ce26e7 8111 } else {
651ba608 8112 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
57fec1fe 8113 gen_op_st_T0_A0(OT_LONG + s->mem_index);
14ce26e7 8114 }
664e0f19
FB
8115 break;
8116 case 5: /* lfence */
8117 case 6: /* mfence */
8001c294 8118 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE2))
664e0f19
FB
8119 goto illegal_op;
8120 break;
8f091a59
FB
8121 case 7: /* sfence / clflush */
8122 if ((modrm & 0xc7) == 0xc0) {
8123 /* sfence */
a35f3ec7 8124 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8f091a59
FB
8125 if (!(s->cpuid_features & CPUID_SSE))
8126 goto illegal_op;
8127 } else {
8128 /* clflush */
8129 if (!(s->cpuid_features & CPUID_CLFLUSH))
8130 goto illegal_op;
0af10c86 8131 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8f091a59
FB
8132 }
8133 break;
664e0f19 8134 default:
14ce26e7
FB
8135 goto illegal_op;
8136 }
8137 break;
a35f3ec7 8138 case 0x10d: /* 3DNow! prefetch(w) */
0af10c86 8139 modrm = cpu_ldub_code(env, s->pc++);
a35f3ec7
AJ
8140 mod = (modrm >> 6) & 3;
8141 if (mod == 3)
8142 goto illegal_op;
0af10c86 8143 gen_lea_modrm(env, s, modrm, &reg_addr, &offset_addr);
8f091a59
FB
8144 /* ignore for now */
8145 break;
3b21e03e 8146 case 0x1aa: /* rsm */
872929aa 8147 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
3b21e03e
FB
8148 if (!(s->flags & HF_SMM_MASK))
8149 goto illegal_op;
728d803b 8150 gen_update_cc_op(s);
3b21e03e 8151 gen_jmp_im(s->pc - s->cs_base);
608badfc 8152 gen_helper_rsm(cpu_env);
3b21e03e
FB
8153 gen_eob(s);
8154 break;
222a3336
AZ
8155 case 0x1b8: /* SSE4.2 popcnt */
8156 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8157 PREFIX_REPZ)
8158 goto illegal_op;
8159 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8160 goto illegal_op;
8161
0af10c86 8162 modrm = cpu_ldub_code(env, s->pc++);
8b4a3df8 8163 reg = ((modrm >> 3) & 7) | rex_r;
222a3336
AZ
8164
8165 if (s->prefix & PREFIX_DATA)
8166 ot = OT_WORD;
8167 else if (s->dflag != 2)
8168 ot = OT_LONG;
8169 else
8170 ot = OT_QUAD;
8171
0af10c86 8172 gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
d3eb5eae 8173 gen_helper_popcnt(cpu_T[0], cpu_env, cpu_T[0], tcg_const_i32(ot));
222a3336 8174 gen_op_mov_reg_T0(ot, reg);
fdb0d09d 8175
3ca51d07 8176 set_cc_op(s, CC_OP_EFLAGS);
222a3336 8177 break;
a35f3ec7
AJ
8178 case 0x10e ... 0x10f:
8179 /* 3DNow! instructions, ignore prefixes */
8180 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
664e0f19
FB
8181 case 0x110 ... 0x117:
8182 case 0x128 ... 0x12f:
4242b1bd 8183 case 0x138 ... 0x13a:
d9f4bb27 8184 case 0x150 ... 0x179:
664e0f19
FB
8185 case 0x17c ... 0x17f:
8186 case 0x1c2:
8187 case 0x1c4 ... 0x1c6:
8188 case 0x1d0 ... 0x1fe:
0af10c86 8189 gen_sse(env, s, b, pc_start, rex_r);
664e0f19 8190 break;
2c0262af
FB
8191 default:
8192 goto illegal_op;
8193 }
8194 /* lock generation */
8195 if (s->prefix & PREFIX_LOCK)
a7812ae4 8196 gen_helper_unlock();
2c0262af
FB
8197 return s->pc;
8198 illegal_op:
ab1f142b 8199 if (s->prefix & PREFIX_LOCK)
a7812ae4 8200 gen_helper_unlock();
2c0262af
FB
8201 /* XXX: ensure that no lock was generated */
8202 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8203 return s->pc;
8204}
8205
2c0262af
FB
8206void optimize_flags_init(void)
8207{
a7812ae4
PB
8208 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8209 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8210 offsetof(CPUX86State, cc_op), "cc_op");
317ac620 8211 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_dst),
a7812ae4 8212 "cc_dst");
a3251186
RH
8213 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src),
8214 "cc_src");
988c3eb0
RH
8215 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src2),
8216 "cc_src2");
437a88a5 8217
cc739bb0
LD
8218#ifdef TARGET_X86_64
8219 cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8220 offsetof(CPUX86State, regs[R_EAX]), "rax");
cc739bb0 8221 cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8222 offsetof(CPUX86State, regs[R_ECX]), "rcx");
cc739bb0 8223 cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8224 offsetof(CPUX86State, regs[R_EDX]), "rdx");
cc739bb0 8225 cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8226 offsetof(CPUX86State, regs[R_EBX]), "rbx");
cc739bb0 8227 cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8228 offsetof(CPUX86State, regs[R_ESP]), "rsp");
cc739bb0 8229 cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8230 offsetof(CPUX86State, regs[R_EBP]), "rbp");
cc739bb0 8231 cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8232 offsetof(CPUX86State, regs[R_ESI]), "rsi");
cc739bb0 8233 cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8234 offsetof(CPUX86State, regs[R_EDI]), "rdi");
cc739bb0 8235 cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8236 offsetof(CPUX86State, regs[8]), "r8");
cc739bb0 8237 cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8238 offsetof(CPUX86State, regs[9]), "r9");
cc739bb0 8239 cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8240 offsetof(CPUX86State, regs[10]), "r10");
cc739bb0 8241 cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8242 offsetof(CPUX86State, regs[11]), "r11");
cc739bb0 8243 cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8244 offsetof(CPUX86State, regs[12]), "r12");
cc739bb0 8245 cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8246 offsetof(CPUX86State, regs[13]), "r13");
cc739bb0 8247 cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8248 offsetof(CPUX86State, regs[14]), "r14");
cc739bb0 8249 cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
317ac620 8250 offsetof(CPUX86State, regs[15]), "r15");
cc739bb0
LD
8251#else
8252 cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8253 offsetof(CPUX86State, regs[R_EAX]), "eax");
cc739bb0 8254 cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8255 offsetof(CPUX86State, regs[R_ECX]), "ecx");
cc739bb0 8256 cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8257 offsetof(CPUX86State, regs[R_EDX]), "edx");
cc739bb0 8258 cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8259 offsetof(CPUX86State, regs[R_EBX]), "ebx");
cc739bb0 8260 cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8261 offsetof(CPUX86State, regs[R_ESP]), "esp");
cc739bb0 8262 cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8263 offsetof(CPUX86State, regs[R_EBP]), "ebp");
cc739bb0 8264 cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8265 offsetof(CPUX86State, regs[R_ESI]), "esi");
cc739bb0 8266 cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
317ac620 8267 offsetof(CPUX86State, regs[R_EDI]), "edi");
cc739bb0 8268#endif
2c0262af
FB
8269}
8270
8271/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8272 basic block 'tb'. If search_pc is TRUE, also generate PC
8273 information for each intermediate instruction. */
467215c2 8274static inline void gen_intermediate_code_internal(X86CPU *cpu,
2cfc5f17 8275 TranslationBlock *tb,
467215c2 8276 bool search_pc)
2c0262af 8277{
ed2803da 8278 CPUState *cs = CPU(cpu);
467215c2 8279 CPUX86State *env = &cpu->env;
2c0262af 8280 DisasContext dc1, *dc = &dc1;
14ce26e7 8281 target_ulong pc_ptr;
2c0262af 8282 uint16_t *gen_opc_end;
a1d1bb31 8283 CPUBreakpoint *bp;
7f5b7d3e 8284 int j, lj;
c068688b 8285 uint64_t flags;
14ce26e7
FB
8286 target_ulong pc_start;
8287 target_ulong cs_base;
2e70f6ef
PB
8288 int num_insns;
8289 int max_insns;
3b46e624 8290
2c0262af 8291 /* generate intermediate code */
14ce26e7
FB
8292 pc_start = tb->pc;
8293 cs_base = tb->cs_base;
2c0262af 8294 flags = tb->flags;
3a1d9b8b 8295
4f31916f 8296 dc->pe = (flags >> HF_PE_SHIFT) & 1;
2c0262af
FB
8297 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8298 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8299 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8300 dc->f_st = 0;
8301 dc->vm86 = (flags >> VM_SHIFT) & 1;
8302 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8303 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8304 dc->tf = (flags >> TF_SHIFT) & 1;
ed2803da 8305 dc->singlestep_enabled = cs->singlestep_enabled;
2c0262af 8306 dc->cc_op = CC_OP_DYNAMIC;
e207582f 8307 dc->cc_op_dirty = false;
2c0262af
FB
8308 dc->cs_base = cs_base;
8309 dc->tb = tb;
8310 dc->popl_esp_hack = 0;
8311 /* select memory access functions */
8312 dc->mem_index = 0;
8313 if (flags & HF_SOFTMMU_MASK) {
a9321a4d 8314 dc->mem_index = (cpu_mmu_index(env) + 1) << 2;
2c0262af 8315 }
0514ef2f
EH
8316 dc->cpuid_features = env->features[FEAT_1_EDX];
8317 dc->cpuid_ext_features = env->features[FEAT_1_ECX];
8318 dc->cpuid_ext2_features = env->features[FEAT_8000_0001_EDX];
8319 dc->cpuid_ext3_features = env->features[FEAT_8000_0001_ECX];
8320 dc->cpuid_7_0_ebx_features = env->features[FEAT_7_0_EBX];
14ce26e7
FB
8321#ifdef TARGET_X86_64
8322 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8323 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8324#endif
7eee2a50 8325 dc->flags = flags;
ed2803da 8326 dc->jmp_opt = !(dc->tf || cs->singlestep_enabled ||
a2cc3b24 8327 (flags & HF_INHIBIT_IRQ_MASK)
415fa2ea 8328#ifndef CONFIG_SOFTMMU
2c0262af
FB
8329 || (flags & HF_SOFTMMU_MASK)
8330#endif
8331 );
4f31916f
FB
8332#if 0
8333 /* check addseg logic */
dc196a57 8334 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
4f31916f
FB
8335 printf("ERROR addseg\n");
8336#endif
8337
a7812ae4
PB
8338 cpu_T[0] = tcg_temp_new();
8339 cpu_T[1] = tcg_temp_new();
8340 cpu_A0 = tcg_temp_new();
a7812ae4
PB
8341
8342 cpu_tmp0 = tcg_temp_new();
8343 cpu_tmp1_i64 = tcg_temp_new_i64();
8344 cpu_tmp2_i32 = tcg_temp_new_i32();
8345 cpu_tmp3_i32 = tcg_temp_new_i32();
8346 cpu_tmp4 = tcg_temp_new();
a7812ae4
PB
8347 cpu_ptr0 = tcg_temp_new_ptr();
8348 cpu_ptr1 = tcg_temp_new_ptr();
a3251186 8349 cpu_cc_srcT = tcg_temp_local_new();
57fec1fe 8350
92414b31 8351 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
2c0262af
FB
8352
8353 dc->is_jmp = DISAS_NEXT;
8354 pc_ptr = pc_start;
8355 lj = -1;
2e70f6ef
PB
8356 num_insns = 0;
8357 max_insns = tb->cflags & CF_COUNT_MASK;
8358 if (max_insns == 0)
8359 max_insns = CF_COUNT_MASK;
2c0262af 8360
806f352d 8361 gen_tb_start();
2c0262af 8362 for(;;) {
72cf2d4f
BS
8363 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
8364 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
a2397807
JK
8365 if (bp->pc == pc_ptr &&
8366 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
2c0262af
FB
8367 gen_debug(dc, pc_ptr - dc->cs_base);
8368 break;
8369 }
8370 }
8371 }
8372 if (search_pc) {
92414b31 8373 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
2c0262af
FB
8374 if (lj < j) {
8375 lj++;
8376 while (lj < j)
ab1103de 8377 tcg_ctx.gen_opc_instr_start[lj++] = 0;
2c0262af 8378 }
25983cad 8379 tcg_ctx.gen_opc_pc[lj] = pc_ptr;
2c0262af 8380 gen_opc_cc_op[lj] = dc->cc_op;
ab1103de 8381 tcg_ctx.gen_opc_instr_start[lj] = 1;
c9c99c22 8382 tcg_ctx.gen_opc_icount[lj] = num_insns;
2c0262af 8383 }
2e70f6ef
PB
8384 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8385 gen_io_start();
8386
0af10c86 8387 pc_ptr = disas_insn(env, dc, pc_ptr);
2e70f6ef 8388 num_insns++;
2c0262af
FB
8389 /* stop translation if indicated */
8390 if (dc->is_jmp)
8391 break;
 8392 /* in single-step mode, we generate only one instruction and
 8393 generate an exception */
a2cc3b24
FB
 8394 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
 8395 the flag and abort the translation to give the irqs a
 8396 chance to happen */
5fafdf24 8397 if (dc->tf || dc->singlestep_enabled ||
2e70f6ef 8398 (flags & HF_INHIBIT_IRQ_MASK)) {
14ce26e7 8399 gen_jmp_im(pc_ptr - dc->cs_base);
2c0262af
FB
8400 gen_eob(dc);
8401 break;
8402 }
8403 /* if too long translation, stop generation too */
efd7f486 8404 if (tcg_ctx.gen_opc_ptr >= gen_opc_end ||
2e70f6ef
PB
8405 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8406 num_insns >= max_insns) {
14ce26e7 8407 gen_jmp_im(pc_ptr - dc->cs_base);
2c0262af
FB
8408 gen_eob(dc);
8409 break;
8410 }
1b530a6d
AJ
8411 if (singlestep) {
8412 gen_jmp_im(pc_ptr - dc->cs_base);
8413 gen_eob(dc);
8414 break;
8415 }
2c0262af 8416 }
2e70f6ef
PB
8417 if (tb->cflags & CF_LAST_IO)
8418 gen_io_end();
806f352d 8419 gen_tb_end(tb, num_insns);
efd7f486 8420 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
2c0262af
FB
 8421 /* make sure the last values are filled in */
8422 if (search_pc) {
92414b31 8423 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
2c0262af
FB
8424 lj++;
8425 while (lj <= j)
ab1103de 8426 tcg_ctx.gen_opc_instr_start[lj++] = 0;
2c0262af 8427 }
3b46e624 8428
2c0262af 8429#ifdef DEBUG_DISAS
8fec2b8c 8430 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
14ce26e7 8431 int disas_flags;
93fcfe39
AL
8432 qemu_log("----------------\n");
8433 qemu_log("IN: %s\n", lookup_symbol(pc_start));
14ce26e7
FB
8434#ifdef TARGET_X86_64
8435 if (dc->code64)
8436 disas_flags = 2;
8437 else
8438#endif
8439 disas_flags = !dc->code32;
f4359b9f 8440 log_target_disas(env, pc_start, pc_ptr - pc_start, disas_flags);
93fcfe39 8441 qemu_log("\n");
2c0262af
FB
8442 }
8443#endif
8444
2e70f6ef 8445 if (!search_pc) {
2c0262af 8446 tb->size = pc_ptr - pc_start;
2e70f6ef
PB
8447 tb->icount = num_insns;
8448 }
2c0262af
FB
8449}
8450
317ac620 8451void gen_intermediate_code(CPUX86State *env, TranslationBlock *tb)
2c0262af 8452{
467215c2 8453 gen_intermediate_code_internal(x86_env_get_cpu(env), tb, false);
2c0262af
FB
8454}
8455
317ac620 8456void gen_intermediate_code_pc(CPUX86State *env, TranslationBlock *tb)
2c0262af 8457{
467215c2 8458 gen_intermediate_code_internal(x86_env_get_cpu(env), tb, true);
2c0262af
FB
8459}
8460
317ac620 8461void restore_state_to_opc(CPUX86State *env, TranslationBlock *tb, int pc_pos)
d2856f1a
AJ
8462{
8463 int cc_op;
8464#ifdef DEBUG_DISAS
8fec2b8c 8465 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
d2856f1a 8466 int i;
93fcfe39 8467 qemu_log("RESTORE:\n");
d2856f1a 8468 for(i = 0;i <= pc_pos; i++) {
ab1103de 8469 if (tcg_ctx.gen_opc_instr_start[i]) {
25983cad
EV
8470 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i,
8471 tcg_ctx.gen_opc_pc[i]);
d2856f1a
AJ
8472 }
8473 }
e87b7cb0 8474 qemu_log("pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
25983cad 8475 pc_pos, tcg_ctx.gen_opc_pc[pc_pos] - tb->cs_base,
d2856f1a
AJ
8476 (uint32_t)tb->cs_base);
8477 }
8478#endif
25983cad 8479 env->eip = tcg_ctx.gen_opc_pc[pc_pos] - tb->cs_base;
d2856f1a
AJ
8480 cc_op = gen_opc_cc_op[pc_pos];
8481 if (cc_op != CC_OP_DYNAMIC)
8482 env->cc_op = cc_op;
8483}