1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define PREFIX_REPZ 0x01
35 #define PREFIX_REPNZ 0x02
36 #define PREFIX_LOCK 0x04
37 #define PREFIX_DATA 0x08
38 #define PREFIX_ADR 0x10
39
40 #ifdef TARGET_X86_64
41 #define X86_64_ONLY(x) x
42 #define X86_64_DEF(x...) x
43 #define CODE64(s) ((s)->code64)
44 #define REX_X(s) ((s)->rex_x)
45 #define REX_B(s) ((s)->rex_b)
46 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47 #if 1
48 #define BUGGY_64(x) NULL
49 #endif
50 #else
51 #define X86_64_ONLY(x) NULL
52 #define X86_64_DEF(x...)
53 #define CODE64(s) 0
54 #define REX_X(s) 0
55 #define REX_B(s) 0
56 #endif
57
58 //#define MACRO_TEST 1
59
60 /* global register indexes */
61 static TCGv cpu_env, cpu_T[2], cpu_A0;
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;
64
65 #ifdef TARGET_X86_64
66 static int x86_64_hregs;
67 #endif
68
69 typedef struct DisasContext {
70 /* current insn context */
71 int override; /* -1 if no override */
72 int prefix;
73 int aflag, dflag;
74 target_ulong pc; /* pc = eip + cs_base */
75 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
76 static state change (stop translation) */
77 /* current block context */
78 target_ulong cs_base; /* base of CS segment */
79 int pe; /* protected mode */
80 int code32; /* 32 bit code segment */
81 #ifdef TARGET_X86_64
82 int lma; /* long mode active */
83 int code64; /* 64 bit code segment */
84 int rex_x, rex_b;
85 #endif
86 int ss32; /* 32 bit stack segment */
87 int cc_op; /* current CC operation */
88 int addseg; /* nonzero if any of DS/ES/SS has a nonzero base */
89 int f_st; /* currently unused */
90 int vm86; /* vm86 mode */
91 int cpl;
92 int iopl;
93 int tf; /* TF cpu flag */
94 int singlestep_enabled; /* "hardware" single step enabled */
95 int jmp_opt; /* use direct block chaining for direct jumps */
96 int mem_index; /* select memory access functions */
97 uint64_t flags; /* all execution flags */
98 struct TranslationBlock *tb;
99 int popl_esp_hack; /* for correct popl with esp base handling */
100 int rip_offset; /* only used in x86_64, but left for simplicity */
101 int cpuid_features;
102 int cpuid_ext_features;
103 int cpuid_ext2_features;
104 } DisasContext;
105
106 static void gen_eob(DisasContext *s);
107 static void gen_jmp(DisasContext *s, target_ulong eip);
108 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109
110 /* i386 arith/logic operations */
111 enum {
112 OP_ADDL,
113 OP_ORL,
114 OP_ADCL,
115 OP_SBBL,
116 OP_ANDL,
117 OP_SUBL,
118 OP_XORL,
119 OP_CMPL,
120 };
121
122 /* i386 shift ops */
123 enum {
124 OP_ROL,
125 OP_ROR,
126 OP_RCL,
127 OP_RCR,
128 OP_SHL,
129 OP_SHR,
130 OP_SHL1, /* undocumented */
131 OP_SAR = 7,
132 };
133
134 /* operand size */
135 enum {
136 OT_BYTE = 0,
137 OT_WORD,
138 OT_LONG,
139 OT_QUAD,
140 };
141
142 enum {
143 /* I386 int registers */
144 OR_EAX, /* MUST be even numbered */
145 OR_ECX,
146 OR_EDX,
147 OR_EBX,
148 OR_ESP,
149 OR_EBP,
150 OR_ESI,
151 OR_EDI,
152
153 OR_TMP0 = 16, /* temporary operand register */
154 OR_TMP1,
155 OR_A0, /* temporary register used when doing address evaluation */
156 };
157
158 static inline void gen_op_movl_T0_0(void)
159 {
160 tcg_gen_movi_tl(cpu_T[0], 0);
161 }
162
163 static inline void gen_op_movl_T0_im(int32_t val)
164 {
165 tcg_gen_movi_tl(cpu_T[0], val);
166 }
167
168 static inline void gen_op_movl_T0_imu(uint32_t val)
169 {
170 tcg_gen_movi_tl(cpu_T[0], val);
171 }
172
173 static inline void gen_op_movl_T1_im(int32_t val)
174 {
175 tcg_gen_movi_tl(cpu_T[1], val);
176 }
177
178 static inline void gen_op_movl_T1_imu(uint32_t val)
179 {
180 tcg_gen_movi_tl(cpu_T[1], val);
181 }
182
183 static inline void gen_op_movl_A0_im(uint32_t val)
184 {
185 tcg_gen_movi_tl(cpu_A0, val);
186 }
187
188 #ifdef TARGET_X86_64
189 static inline void gen_op_movq_A0_im(int64_t val)
190 {
191 tcg_gen_movi_tl(cpu_A0, val);
192 }
193 #endif
194
195 static inline void gen_movtl_T0_im(target_ulong val)
196 {
197 tcg_gen_movi_tl(cpu_T[0], val);
198 }
199
200 static inline void gen_movtl_T1_im(target_ulong val)
201 {
202 tcg_gen_movi_tl(cpu_T[1], val);
203 }
204
205 static inline void gen_op_andl_T0_ffff(void)
206 {
207 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
208 }
209
210 static inline void gen_op_andl_T0_im(uint32_t val)
211 {
212 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
213 }
214
215 static inline void gen_op_movl_T0_T1(void)
216 {
217 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
218 }
219
220 static inline void gen_op_andl_A0_ffff(void)
221 {
222 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
223 }
224
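/* DEF_REGS expands a register-indexed name for every integer register:
   16 registers when long mode is compiled in, 8 otherwise */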
225 #ifdef TARGET_X86_64
226
227 #define NB_OP_SIZES 4
228
229 #define DEF_REGS(prefix, suffix) \
230 prefix ## EAX ## suffix,\
231 prefix ## ECX ## suffix,\
232 prefix ## EDX ## suffix,\
233 prefix ## EBX ## suffix,\
234 prefix ## ESP ## suffix,\
235 prefix ## EBP ## suffix,\
236 prefix ## ESI ## suffix,\
237 prefix ## EDI ## suffix,\
238 prefix ## R8 ## suffix,\
239 prefix ## R9 ## suffix,\
240 prefix ## R10 ## suffix,\
241 prefix ## R11 ## suffix,\
242 prefix ## R12 ## suffix,\
243 prefix ## R13 ## suffix,\
244 prefix ## R14 ## suffix,\
245 prefix ## R15 ## suffix,
246
247 #else /* !TARGET_X86_64 */
248
249 #define NB_OP_SIZES 3
250
251 #define DEF_REGS(prefix, suffix) \
252 prefix ## EAX ## suffix,\
253 prefix ## ECX ## suffix,\
254 prefix ## EDX ## suffix,\
255 prefix ## EBX ## suffix,\
256 prefix ## ESP ## suffix,\
257 prefix ## EBP ## suffix,\
258 prefix ## ESI ## suffix,\
259 prefix ## EDI ## suffix,
260
261 #endif /* !TARGET_X86_64 */
262
263 #if defined(WORDS_BIGENDIAN)
264 #define REG_B_OFFSET (sizeof(target_ulong) - 1)
265 #define REG_H_OFFSET (sizeof(target_ulong) - 2)
266 #define REG_W_OFFSET (sizeof(target_ulong) - 2)
267 #define REG_L_OFFSET (sizeof(target_ulong) - 4)
268 #define REG_LH_OFFSET (sizeof(target_ulong) - 8)
269 #else
270 #define REG_B_OFFSET 0
271 #define REG_H_OFFSET 1
272 #define REG_W_OFFSET 0
273 #define REG_L_OFFSET 0
274 #define REG_LH_OFFSET 4
275 #endif
276
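/* store T[t_index] into register 'reg'. Byte stores may target a high
   byte register (AH..BH) when no REX prefix is active; in long mode,
   32 bit stores clear the high half of the destination register */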
277 static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
278 {
279 switch(ot) {
280 case OT_BYTE:
281 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
282 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
283 } else {
284 tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
285 }
286 break;
287 case OT_WORD:
288 tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
289 break;
290 #ifdef TARGET_X86_64
291 case OT_LONG:
292 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
293 /* high part of register set to zero */
294 tcg_gen_movi_tl(cpu_tmp0, 0);
295 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
296 break;
297 default:
298 case OT_QUAD:
299 tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
300 break;
301 #else
302 default:
303 case OT_LONG:
304 tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
305 break;
306 #endif
307 }
308 }
309
310 static inline void gen_op_mov_reg_T0(int ot, int reg)
311 {
312 gen_op_mov_reg_TN(ot, 0, reg);
313 }
314
315 static inline void gen_op_mov_reg_T1(int ot, int reg)
316 {
317 gen_op_mov_reg_TN(ot, 1, reg);
318 }
319
320 static inline void gen_op_mov_reg_A0(int size, int reg)
321 {
322 switch(size) {
323 case 0:
324 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
325 break;
326 #ifdef TARGET_X86_64
327 case 1:
328 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
329 /* high part of register set to zero */
330 tcg_gen_movi_tl(cpu_tmp0, 0);
331 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
332 break;
333 default:
334 case 2:
335 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
336 break;
337 #else
338 default:
339 case 1:
340 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
341 break;
342 #endif
343 }
344 }
345
346 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
347 {
348 switch(ot) {
349 case OT_BYTE:
350 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
351 goto std_case;
352 } else {
353 tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
354 }
355 break;
356 default:
357 std_case:
358 tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
359 break;
360 }
361 }
362
363 static inline void gen_op_movl_A0_reg(int reg)
364 {
365 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
366 }
367
368 static inline void gen_op_addl_A0_im(int32_t val)
369 {
370 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
371 #ifdef TARGET_X86_64
372 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
373 #endif
374 }
375
376 #ifdef TARGET_X86_64
377 static inline void gen_op_addq_A0_im(int64_t val)
378 {
379 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
380 }
381 #endif
382
383 static void gen_add_A0_im(DisasContext *s, int val)
384 {
385 #ifdef TARGET_X86_64
386 if (CODE64(s))
387 gen_op_addq_A0_im(val);
388 else
389 #endif
390 gen_op_addl_A0_im(val);
391 }
392
393 static inline void gen_op_addl_T0_T1(void)
394 {
395 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
396 }
397
398 static inline void gen_op_jmp_T0(void)
399 {
400 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
401 }
402
403 static inline void gen_op_addw_ESP_im(int32_t val)
404 {
405 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
406 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
407 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
408 }
409
410 static inline void gen_op_addl_ESP_im(int32_t val)
411 {
412 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
413 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
414 #ifdef TARGET_X86_64
415 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
416 #endif
417 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
418 }
419
420 #ifdef TARGET_X86_64
421 static inline void gen_op_addq_ESP_im(int32_t val)
422 {
423 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
424 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
425 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
426 }
427 #endif
428
429 static inline void gen_op_set_cc_op(int32_t val)
430 {
431 tcg_gen_movi_tl(cpu_tmp0, val);
432 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
433 }
434
435 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
436 {
437 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
438 if (shift != 0)
439 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
440 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
441 #ifdef TARGET_X86_64
442 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
443 #endif
444 }
445
446 static inline void gen_op_movl_A0_seg(int reg)
447 {
448 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
449 }
450
451 static inline void gen_op_addl_A0_seg(int reg)
452 {
453 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
454 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
455 #ifdef TARGET_X86_64
456 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
457 #endif
458 }
459
460 #ifdef TARGET_X86_64
461 static inline void gen_op_movq_A0_seg(int reg)
462 {
463 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
464 }
465
466 static inline void gen_op_addq_A0_seg(int reg)
467 {
468 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
469 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
470 }
471
472 static inline void gen_op_movq_A0_reg(int reg)
473 {
474 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
475 }
476
477 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
478 {
479 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
480 if (shift != 0)
481 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
482 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
483 }
484 #endif
485
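/* dispatch tables of legacy micro-ops not yet converted to TCG,
   indexed by operand size (and register where applicable) */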
486 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
487 [0] = {
488 DEF_REGS(gen_op_cmovw_, _T1_T0)
489 },
490 [1] = {
491 DEF_REGS(gen_op_cmovl_, _T1_T0)
492 },
493 #ifdef TARGET_X86_64
494 [2] = {
495 DEF_REGS(gen_op_cmovq_, _T1_T0)
496 },
497 #endif
498 };
499
500 #define DEF_ARITHC(SUFFIX)\
501 {\
502 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
503 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
504 },\
505 {\
506 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
507 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
508 },\
509 {\
510 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
511 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
512 },\
513 {\
514 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
515 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
516 },
517
518 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
519 DEF_ARITHC( )
520 };
521
522 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
523 DEF_ARITHC(_raw)
524 #ifndef CONFIG_USER_ONLY
525 DEF_ARITHC(_kernel)
526 DEF_ARITHC(_user)
527 #endif
528 };
529
530 static const int cc_op_arithb[8] = {
531 CC_OP_ADDB,
532 CC_OP_LOGICB,
533 CC_OP_ADDB,
534 CC_OP_SUBB,
535 CC_OP_LOGICB,
536 CC_OP_SUBB,
537 CC_OP_LOGICB,
538 CC_OP_SUBB,
539 };
540
541 #define DEF_CMPXCHG(SUFFIX)\
542 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
543 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
544 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
545 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
546
547 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
548 DEF_CMPXCHG( )
549 };
550
551 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
552 DEF_CMPXCHG(_raw)
553 #ifndef CONFIG_USER_ONLY
554 DEF_CMPXCHG(_kernel)
555 DEF_CMPXCHG(_user)
556 #endif
557 };
558
559 #define DEF_SHIFT(SUFFIX)\
560 {\
561 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
562 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
563 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
565 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
566 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
568 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
569 },\
570 {\
571 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
572 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
573 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
574 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
575 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
576 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
577 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
578 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
579 },\
580 {\
581 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
583 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
584 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
586 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
587 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
588 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
589 },\
590 {\
591 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
592 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
593 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
594 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
595 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
596 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
597 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
598 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
599 },
600
601 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
602 DEF_SHIFT( )
603 };
604
605 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
606 DEF_SHIFT(_raw)
607 #ifndef CONFIG_USER_ONLY
608 DEF_SHIFT(_kernel)
609 DEF_SHIFT(_user)
610 #endif
611 };
612
613 #define DEF_SHIFTD(SUFFIX, op)\
614 {\
615 NULL,\
616 NULL,\
617 },\
618 {\
619 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
620 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
621 },\
622 {\
623 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
624 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
625 },\
626 {\
627 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
628 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
629 },
630
631 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
632 DEF_SHIFTD(, im)
633 };
634
635 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
636 DEF_SHIFTD(, ECX)
637 };
638
639 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
640 DEF_SHIFTD(_raw, im)
641 #ifndef CONFIG_USER_ONLY
642 DEF_SHIFTD(_kernel, im)
643 DEF_SHIFTD(_user, im)
644 #endif
645 };
646
647 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
648 DEF_SHIFTD(_raw, ECX)
649 #ifndef CONFIG_USER_ONLY
650 DEF_SHIFTD(_kernel, ECX)
651 DEF_SHIFTD(_user, ECX)
652 #endif
653 };
654
655 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
656 [0] = {
657 gen_op_btw_T0_T1_cc,
658 gen_op_btsw_T0_T1_cc,
659 gen_op_btrw_T0_T1_cc,
660 gen_op_btcw_T0_T1_cc,
661 },
662 [1] = {
663 gen_op_btl_T0_T1_cc,
664 gen_op_btsl_T0_T1_cc,
665 gen_op_btrl_T0_T1_cc,
666 gen_op_btcl_T0_T1_cc,
667 },
668 #ifdef TARGET_X86_64
669 [2] = {
670 gen_op_btq_T0_T1_cc,
671 gen_op_btsq_T0_T1_cc,
672 gen_op_btrq_T0_T1_cc,
673 gen_op_btcq_T0_T1_cc,
674 },
675 #endif
676 };
677
678 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
679 gen_op_add_bitw_A0_T1,
680 gen_op_add_bitl_A0_T1,
681 X86_64_ONLY(gen_op_add_bitq_A0_T1),
682 };
683
684 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
685 [0] = {
686 gen_op_bsfw_T0_cc,
687 gen_op_bsrw_T0_cc,
688 },
689 [1] = {
690 gen_op_bsfl_T0_cc,
691 gen_op_bsrl_T0_cc,
692 },
693 #ifdef TARGET_X86_64
694 [2] = {
695 gen_op_bsfq_T0_cc,
696 gen_op_bsrq_T0_cc,
697 },
698 #endif
699 };
700
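/* memory access helpers: the low 2 bits of 'idx' select the operand
   size (OT_*); the upper bits come from s->mem_index and select the
   memory access functions (MMU index), hence (idx >> 2) - 1 below */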
701 static inline void gen_op_lds_T0_A0(int idx)
702 {
703 int mem_index = (idx >> 2) - 1;
704 switch(idx & 3) {
705 case 0:
706 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
707 break;
708 case 1:
709 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
710 break;
711 default:
712 case 2:
713 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
714 break;
715 }
716 }
717
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static inline void gen_op_ld_T0_A0(int idx)
720 {
721 int mem_index = (idx >> 2) - 1;
722 switch(idx & 3) {
723 case 0:
724 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
725 break;
726 case 1:
727 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
728 break;
729 case 2:
730 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
731 break;
732 default:
733 case 3:
734 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
735 break;
736 }
737 }
738
739 static inline void gen_op_ldu_T0_A0(int idx)
740 {
741 gen_op_ld_T0_A0(idx);
742 }
743
744 static inline void gen_op_ld_T1_A0(int idx)
745 {
746 int mem_index = (idx >> 2) - 1;
747 switch(idx & 3) {
748 case 0:
749 tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
750 break;
751 case 1:
752 tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
753 break;
754 case 2:
755 tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
756 break;
757 default:
758 case 3:
759 tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
760 break;
761 }
762 }
763
764 static inline void gen_op_st_T0_A0(int idx)
765 {
766 int mem_index = (idx >> 2) - 1;
767 switch(idx & 3) {
768 case 0:
769 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
770 break;
771 case 1:
772 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
773 break;
774 case 2:
775 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
776 break;
777 default:
778 case 3:
779 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
780 break;
781 }
782 }
783
784 static inline void gen_op_st_T1_A0(int idx)
785 {
786 int mem_index = (idx >> 2) - 1;
787 switch(idx & 3) {
788 case 0:
789 tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
790 break;
791 case 1:
792 tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
793 break;
794 case 2:
795 tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
796 break;
797 default:
798 case 3:
799 tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
800 break;
801 }
802 }
803
804 static inline void gen_jmp_im(target_ulong pc)
805 {
806 tcg_gen_movi_tl(cpu_tmp0, pc);
807 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
808 }
809
810 static inline void gen_string_movl_A0_ESI(DisasContext *s)
811 {
812 int override;
813
814 override = s->override;
815 #ifdef TARGET_X86_64
816 if (s->aflag == 2) {
817 if (override >= 0) {
818 gen_op_movq_A0_seg(override);
819 gen_op_addq_A0_reg_sN(0, R_ESI);
820 } else {
821 gen_op_movq_A0_reg(R_ESI);
822 }
823 } else
824 #endif
825 if (s->aflag) {
826 /* 32 bit address */
827 if (s->addseg && override < 0)
828 override = R_DS;
829 if (override >= 0) {
830 gen_op_movl_A0_seg(override);
831 gen_op_addl_A0_reg_sN(0, R_ESI);
832 } else {
833 gen_op_movl_A0_reg(R_ESI);
834 }
835 } else {
836 /* 16 bit address, always override */
837 if (override < 0)
838 override = R_DS;
839 gen_op_movl_A0_reg(R_ESI);
840 gen_op_andl_A0_ffff();
841 gen_op_addl_A0_seg(override);
842 }
843 }
844
845 static inline void gen_string_movl_A0_EDI(DisasContext *s)
846 {
847 #ifdef TARGET_X86_64
848 if (s->aflag == 2) {
849 gen_op_movq_A0_reg(R_EDI);
850 } else
851 #endif
852 if (s->aflag) {
853 if (s->addseg) {
854 gen_op_movl_A0_seg(R_ES);
855 gen_op_addl_A0_reg_sN(0, R_EDI);
856 } else {
857 gen_op_movl_A0_reg(R_EDI);
858 }
859 } else {
860 gen_op_movl_A0_reg(R_EDI);
861 gen_op_andl_A0_ffff();
862 gen_op_addl_A0_seg(R_ES);
863 }
864 }
865
866 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867 gen_op_movl_T0_Dshiftb,
868 gen_op_movl_T0_Dshiftw,
869 gen_op_movl_T0_Dshiftl,
870 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
871 };
872
873 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
874 gen_op_jnz_ecxw,
875 gen_op_jnz_ecxl,
876 X86_64_ONLY(gen_op_jnz_ecxq),
877 };
878
879 static GenOpFunc1 *gen_op_jz_ecx[3] = {
880 gen_op_jz_ecxw,
881 gen_op_jz_ecxl,
882 X86_64_ONLY(gen_op_jz_ecxq),
883 };
884
885 static GenOpFunc *gen_op_dec_ECX[3] = {
886 gen_op_decw_ECX,
887 gen_op_decl_ECX,
888 X86_64_ONLY(gen_op_decq_ECX),
889 };
890
891 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
892 {
893 gen_op_jnz_subb,
894 gen_op_jnz_subw,
895 gen_op_jnz_subl,
896 X86_64_ONLY(gen_op_jnz_subq),
897 },
898 {
899 gen_op_jz_subb,
900 gen_op_jz_subw,
901 gen_op_jz_subl,
902 X86_64_ONLY(gen_op_jz_subq),
903 },
904 };
905
906 static GenOpFunc *gen_op_in_DX_T0[3] = {
907 gen_op_inb_DX_T0,
908 gen_op_inw_DX_T0,
909 gen_op_inl_DX_T0,
910 };
911
912 static GenOpFunc *gen_op_out_DX_T0[3] = {
913 gen_op_outb_DX_T0,
914 gen_op_outw_DX_T0,
915 gen_op_outl_DX_T0,
916 };
917
918 static GenOpFunc *gen_op_in[3] = {
919 gen_op_inb_T0_T1,
920 gen_op_inw_T0_T1,
921 gen_op_inl_T0_T1,
922 };
923
924 static GenOpFunc *gen_op_out[3] = {
925 gen_op_outb_T0_T1,
926 gen_op_outw_T0_T1,
927 gen_op_outl_T0_T1,
928 };
929
930 static GenOpFunc *gen_check_io_T0[3] = {
931 gen_op_check_iob_T0,
932 gen_op_check_iow_T0,
933 gen_op_check_iol_T0,
934 };
935
936 static GenOpFunc *gen_check_io_DX[3] = {
937 gen_op_check_iob_DX,
938 gen_op_check_iow_DX,
939 gen_op_check_iol_DX,
940 };
941
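/* in protected mode, when CPL > IOPL (or in vm86 mode), I/O
   instructions must be checked against the TSS I/O permission bitmap */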
942 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
943 {
944 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945 if (s->cc_op != CC_OP_DYNAMIC)
946 gen_op_set_cc_op(s->cc_op);
947 gen_jmp_im(cur_eip);
948 if (use_dx)
949 gen_check_io_DX[ot]();
950 else
951 gen_check_io_T0[ot]();
952 }
953 }
954
955 static inline void gen_movs(DisasContext *s, int ot)
956 {
957 gen_string_movl_A0_ESI(s);
958 gen_op_ld_T0_A0(ot + s->mem_index);
959 gen_string_movl_A0_EDI(s);
960 gen_op_st_T0_A0(ot + s->mem_index);
961 gen_op_movl_T0_Dshift[ot]();
962 #ifdef TARGET_X86_64
963 if (s->aflag == 2) {
964 gen_op_addq_ESI_T0();
965 gen_op_addq_EDI_T0();
966 } else
967 #endif
968 if (s->aflag) {
969 gen_op_addl_ESI_T0();
970 gen_op_addl_EDI_T0();
971 } else {
972 gen_op_addw_ESI_T0();
973 gen_op_addw_EDI_T0();
974 }
975 }
976
977 static inline void gen_update_cc_op(DisasContext *s)
978 {
979 if (s->cc_op != CC_OP_DYNAMIC) {
980 gen_op_set_cc_op(s->cc_op);
981 s->cc_op = CC_OP_DYNAMIC;
982 }
983 }
984
985 /* XXX: does not work with gdbstub "ice" single step - not a
986 serious problem */
987 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
988 {
989 int l1, l2;
990
991 l1 = gen_new_label();
992 l2 = gen_new_label();
993 gen_op_jnz_ecx[s->aflag](l1);
994 gen_set_label(l2);
995 gen_jmp_tb(s, next_eip, 1);
996 gen_set_label(l1);
997 return l2;
998 }
999
1000 static inline void gen_stos(DisasContext *s, int ot)
1001 {
1002 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1003 gen_string_movl_A0_EDI(s);
1004 gen_op_st_T0_A0(ot + s->mem_index);
1005 gen_op_movl_T0_Dshift[ot]();
1006 #ifdef TARGET_X86_64
1007 if (s->aflag == 2) {
1008 gen_op_addq_EDI_T0();
1009 } else
1010 #endif
1011 if (s->aflag) {
1012 gen_op_addl_EDI_T0();
1013 } else {
1014 gen_op_addw_EDI_T0();
1015 }
1016 }
1017
1018 static inline void gen_lods(DisasContext *s, int ot)
1019 {
1020 gen_string_movl_A0_ESI(s);
1021 gen_op_ld_T0_A0(ot + s->mem_index);
1022 gen_op_mov_reg_T0(ot, R_EAX);
1023 gen_op_movl_T0_Dshift[ot]();
1024 #ifdef TARGET_X86_64
1025 if (s->aflag == 2) {
1026 gen_op_addq_ESI_T0();
1027 } else
1028 #endif
1029 if (s->aflag) {
1030 gen_op_addl_ESI_T0();
1031 } else {
1032 gen_op_addw_ESI_T0();
1033 }
1034 }
1035
1036 static inline void gen_scas(DisasContext *s, int ot)
1037 {
1038 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1039 gen_string_movl_A0_EDI(s);
1040 gen_op_ld_T1_A0(ot + s->mem_index);
1041 gen_op_cmpl_T0_T1_cc();
1042 gen_op_movl_T0_Dshift[ot]();
1043 #ifdef TARGET_X86_64
1044 if (s->aflag == 2) {
1045 gen_op_addq_EDI_T0();
1046 } else
1047 #endif
1048 if (s->aflag) {
1049 gen_op_addl_EDI_T0();
1050 } else {
1051 gen_op_addw_EDI_T0();
1052 }
1053 }
1054
1055 static inline void gen_cmps(DisasContext *s, int ot)
1056 {
1057 gen_string_movl_A0_ESI(s);
1058 gen_op_ld_T0_A0(ot + s->mem_index);
1059 gen_string_movl_A0_EDI(s);
1060 gen_op_ld_T1_A0(ot + s->mem_index);
1061 gen_op_cmpl_T0_T1_cc();
1062 gen_op_movl_T0_Dshift[ot]();
1063 #ifdef TARGET_X86_64
1064 if (s->aflag == 2) {
1065 gen_op_addq_ESI_T0();
1066 gen_op_addq_EDI_T0();
1067 } else
1068 #endif
1069 if (s->aflag) {
1070 gen_op_addl_ESI_T0();
1071 gen_op_addl_EDI_T0();
1072 } else {
1073 gen_op_addw_ESI_T0();
1074 gen_op_addw_EDI_T0();
1075 }
1076 }
1077
1078 static inline void gen_ins(DisasContext *s, int ot)
1079 {
1080 gen_string_movl_A0_EDI(s);
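/* store a dummy 0 first so that any page fault is raised before
   the port access is performed, keeping INS restartable */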
1081 gen_op_movl_T0_0();
1082 gen_op_st_T0_A0(ot + s->mem_index);
1083 gen_op_in_DX_T0[ot]();
1084 gen_op_st_T0_A0(ot + s->mem_index);
1085 gen_op_movl_T0_Dshift[ot]();
1086 #ifdef TARGET_X86_64
1087 if (s->aflag == 2) {
1088 gen_op_addq_EDI_T0();
1089 } else
1090 #endif
1091 if (s->aflag) {
1092 gen_op_addl_EDI_T0();
1093 } else {
1094 gen_op_addw_EDI_T0();
1095 }
1096 }
1097
1098 static inline void gen_outs(DisasContext *s, int ot)
1099 {
1100 gen_string_movl_A0_ESI(s);
1101 gen_op_ld_T0_A0(ot + s->mem_index);
1102 gen_op_out_DX_T0[ot]();
1103 gen_op_movl_T0_Dshift[ot]();
1104 #ifdef TARGET_X86_64
1105 if (s->aflag == 2) {
1106 gen_op_addq_ESI_T0();
1107 } else
1108 #endif
1109 if (s->aflag) {
1110 gen_op_addl_ESI_T0();
1111 } else {
1112 gen_op_addw_ESI_T0();
1113 }
1114 }
1115
1116 /* same method as Valgrind: we generate jumps to current or next
1117 instruction */
1118 #define GEN_REPZ(op) \
1119 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1120 target_ulong cur_eip, target_ulong next_eip) \
1121 { \
1122 int l2;\
1123 gen_update_cc_op(s); \
1124 l2 = gen_jz_ecx_string(s, next_eip); \
1125 gen_ ## op(s, ot); \
1126 gen_op_dec_ECX[s->aflag](); \
1127 /* a loop would cause two single step exceptions if ECX = 1 \
1128 before rep string_insn */ \
1129 if (!s->jmp_opt) \
1130 gen_op_jz_ecx[s->aflag](l2); \
1131 gen_jmp(s, cur_eip); \
1132 }
1133
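/* GEN_REPZ2 is used for SCAS and CMPS, which additionally terminate
   when the ZF condition selected by REPZ/REPNZ no longer holds */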
1134 #define GEN_REPZ2(op) \
1135 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1136 target_ulong cur_eip, \
1137 target_ulong next_eip, \
1138 int nz) \
1139 { \
1140 int l2;\
1141 gen_update_cc_op(s); \
1142 l2 = gen_jz_ecx_string(s, next_eip); \
1143 gen_ ## op(s, ot); \
1144 gen_op_dec_ECX[s->aflag](); \
1145 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1146 gen_op_string_jnz_sub[nz][ot](l2);\
1147 if (!s->jmp_opt) \
1148 gen_op_jz_ecx[s->aflag](l2); \
1149 gen_jmp(s, cur_eip); \
1150 }
1151
1152 GEN_REPZ(movs)
1153 GEN_REPZ(stos)
1154 GEN_REPZ(lods)
1155 GEN_REPZ(ins)
1156 GEN_REPZ(outs)
1157 GEN_REPZ2(scas)
1158 GEN_REPZ2(cmps)
1159
1160 enum {
1161 JCC_O,
1162 JCC_B,
1163 JCC_Z,
1164 JCC_BE,
1165 JCC_S,
1166 JCC_P,
1167 JCC_L,
1168 JCC_LE,
1169 };
1170
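/* conditional jump helpers indexed by [operand size][condition];
   NULL (and BUGGY_64) entries fall back to the slow EFLAGS path */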
1171 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1172 [OT_BYTE] = {
1173 NULL,
1174 gen_op_jb_subb,
1175 gen_op_jz_subb,
1176 gen_op_jbe_subb,
1177 gen_op_js_subb,
1178 NULL,
1179 gen_op_jl_subb,
1180 gen_op_jle_subb,
1181 },
1182 [OT_WORD] = {
1183 NULL,
1184 gen_op_jb_subw,
1185 gen_op_jz_subw,
1186 gen_op_jbe_subw,
1187 gen_op_js_subw,
1188 NULL,
1189 gen_op_jl_subw,
1190 gen_op_jle_subw,
1191 },
1192 [OT_LONG] = {
1193 NULL,
1194 gen_op_jb_subl,
1195 gen_op_jz_subl,
1196 gen_op_jbe_subl,
1197 gen_op_js_subl,
1198 NULL,
1199 gen_op_jl_subl,
1200 gen_op_jle_subl,
1201 },
1202 #ifdef TARGET_X86_64
1203 [OT_QUAD] = {
1204 NULL,
1205 BUGGY_64(gen_op_jb_subq),
1206 gen_op_jz_subq,
1207 BUGGY_64(gen_op_jbe_subq),
1208 gen_op_js_subq,
1209 NULL,
1210 BUGGY_64(gen_op_jl_subq),
1211 BUGGY_64(gen_op_jle_subq),
1212 },
1213 #endif
1214 };
1215 static GenOpFunc1 *gen_op_loop[3][4] = {
1216 [0] = {
1217 gen_op_loopnzw,
1218 gen_op_loopzw,
1219 gen_op_jnz_ecxw,
1220 },
1221 [1] = {
1222 gen_op_loopnzl,
1223 gen_op_loopzl,
1224 gen_op_jnz_ecxl,
1225 },
1226 #ifdef TARGET_X86_64
1227 [2] = {
1228 gen_op_loopnzq,
1229 gen_op_loopzq,
1230 gen_op_jnz_ecxq,
1231 },
1232 #endif
1233 };
1234
1235 static GenOpFunc *gen_setcc_slow[8] = {
1236 gen_op_seto_T0_cc,
1237 gen_op_setb_T0_cc,
1238 gen_op_setz_T0_cc,
1239 gen_op_setbe_T0_cc,
1240 gen_op_sets_T0_cc,
1241 gen_op_setp_T0_cc,
1242 gen_op_setl_T0_cc,
1243 gen_op_setle_T0_cc,
1244 };
1245
1246 static GenOpFunc *gen_setcc_sub[4][8] = {
1247 [OT_BYTE] = {
1248 NULL,
1249 gen_op_setb_T0_subb,
1250 gen_op_setz_T0_subb,
1251 gen_op_setbe_T0_subb,
1252 gen_op_sets_T0_subb,
1253 NULL,
1254 gen_op_setl_T0_subb,
1255 gen_op_setle_T0_subb,
1256 },
1257 [OT_WORD] = {
1258 NULL,
1259 gen_op_setb_T0_subw,
1260 gen_op_setz_T0_subw,
1261 gen_op_setbe_T0_subw,
1262 gen_op_sets_T0_subw,
1263 NULL,
1264 gen_op_setl_T0_subw,
1265 gen_op_setle_T0_subw,
1266 },
1267 [OT_LONG] = {
1268 NULL,
1269 gen_op_setb_T0_subl,
1270 gen_op_setz_T0_subl,
1271 gen_op_setbe_T0_subl,
1272 gen_op_sets_T0_subl,
1273 NULL,
1274 gen_op_setl_T0_subl,
1275 gen_op_setle_T0_subl,
1276 },
1277 #ifdef TARGET_X86_64
1278 [OT_QUAD] = {
1279 NULL,
1280 gen_op_setb_T0_subq,
1281 gen_op_setz_T0_subq,
1282 gen_op_setbe_T0_subq,
1283 gen_op_sets_T0_subq,
1284 NULL,
1285 gen_op_setl_T0_subq,
1286 gen_op_setle_T0_subq,
1287 },
1288 #endif
1289 };
1290
1291 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1292 gen_op_fadd_ST0_FT0,
1293 gen_op_fmul_ST0_FT0,
1294 gen_op_fcom_ST0_FT0,
1295 gen_op_fcom_ST0_FT0,
1296 gen_op_fsub_ST0_FT0,
1297 gen_op_fsubr_ST0_FT0,
1298 gen_op_fdiv_ST0_FT0,
1299 gen_op_fdivr_ST0_FT0,
1300 };
1301
1302 /* NOTE the exception in "r" op ordering */
1303 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1304 gen_op_fadd_STN_ST0,
1305 gen_op_fmul_STN_ST0,
1306 NULL,
1307 NULL,
1308 gen_op_fsubr_STN_ST0,
1309 gen_op_fsub_STN_ST0,
1310 gen_op_fdivr_STN_ST0,
1311 gen_op_fdiv_STN_ST0,
1312 };
1313
1314 /* if d == OR_TMP0, it means memory operand (address in A0) */
1315 static void gen_op(DisasContext *s1, int op, int ot, int d)
1316 {
1317 GenOpFunc *gen_update_cc;
1318
1319 if (d != OR_TMP0) {
1320 gen_op_mov_TN_reg(ot, 0, d);
1321 } else {
1322 gen_op_ld_T0_A0(ot + s1->mem_index);
1323 }
1324 switch(op) {
1325 case OP_ADCL:
1326 case OP_SBBL:
1327 if (s1->cc_op != CC_OP_DYNAMIC)
1328 gen_op_set_cc_op(s1->cc_op);
1329 if (d != OR_TMP0) {
1330 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331 gen_op_mov_reg_T0(ot, d);
1332 } else {
1333 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1334 }
1335 s1->cc_op = CC_OP_DYNAMIC;
1336 goto the_end;
1337 case OP_ADDL:
1338 gen_op_addl_T0_T1();
1339 s1->cc_op = CC_OP_ADDB + ot;
1340 gen_update_cc = gen_op_update2_cc;
1341 break;
1342 case OP_SUBL:
1343 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1344 s1->cc_op = CC_OP_SUBB + ot;
1345 gen_update_cc = gen_op_update2_cc;
1346 break;
1347 default:
1348 case OP_ANDL:
1349 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1350 s1->cc_op = CC_OP_LOGICB + ot;
1351 gen_update_cc = gen_op_update1_cc;
1352 break;
1353 case OP_ORL:
1354 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1355 s1->cc_op = CC_OP_LOGICB + ot;
1356 gen_update_cc = gen_op_update1_cc;
1357 break;
1358 case OP_XORL:
1359 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1360 s1->cc_op = CC_OP_LOGICB + ot;
1361 gen_update_cc = gen_op_update1_cc;
1362 break;
1363 case OP_CMPL:
1364 gen_op_cmpl_T0_T1_cc();
1365 s1->cc_op = CC_OP_SUBB + ot;
1366 gen_update_cc = NULL;
1367 break;
1368 }
1369 if (op != OP_CMPL) {
1370 if (d != OR_TMP0)
1371 gen_op_mov_reg_T0(ot, d);
1372 else
1373 gen_op_st_T0_A0(ot + s1->mem_index);
1374 }
1375 /* the flags update must happen after the memory write (precise
1376 exception support) */
1377 if (gen_update_cc)
1378 gen_update_cc();
1379 the_end: ;
1380 }
1381
1382 /* if d == OR_TMP0, it means memory operand (address in A0) */
1383 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1384 {
1385 if (d != OR_TMP0)
1386 gen_op_mov_TN_reg(ot, 0, d);
1387 else
1388 gen_op_ld_T0_A0(ot + s1->mem_index);
1389 if (s1->cc_op != CC_OP_DYNAMIC)
1390 gen_op_set_cc_op(s1->cc_op);
1391 if (c > 0) {
1392 gen_op_incl_T0();
1393 s1->cc_op = CC_OP_INCB + ot;
1394 } else {
1395 gen_op_decl_T0();
1396 s1->cc_op = CC_OP_DECB + ot;
1397 }
1398 if (d != OR_TMP0)
1399 gen_op_mov_reg_T0(ot, d);
1400 else
1401 gen_op_st_T0_A0(ot + s1->mem_index);
1402 gen_op_update_inc_cc();
1403 }
1404
1405 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1406 {
1407 if (d != OR_TMP0)
1408 gen_op_mov_TN_reg(ot, 0, d);
1409 else
1410 gen_op_ld_T0_A0(ot + s1->mem_index);
1411 if (s != OR_TMP1)
1412 gen_op_mov_TN_reg(ot, 1, s);
1413 /* for zero counts, flags are not updated, so must do it dynamically */
1414 if (s1->cc_op != CC_OP_DYNAMIC)
1415 gen_op_set_cc_op(s1->cc_op);
1416
1417 if (d != OR_TMP0)
1418 gen_op_shift_T0_T1_cc[ot][op]();
1419 else
1420 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1421 if (d != OR_TMP0)
1422 gen_op_mov_reg_T0(ot, d);
1423 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1424 }
1425
1426 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1427 {
1428 /* currently not optimized */
1429 gen_op_movl_T1_im(c);
1430 gen_shift(s1, op, ot, d, OR_TMP1);
1431 }
1432
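/* decode the ModRM byte plus any SIB and displacement bytes and
   compute the resulting effective address into A0 */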
1433 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1434 {
1435 target_long disp;
1436 int havesib;
1437 int base;
1438 int index;
1439 int scale;
1440 int opreg;
1441 int mod, rm, code, override, must_add_seg;
1442
1443 override = s->override;
1444 must_add_seg = s->addseg;
1445 if (override >= 0)
1446 must_add_seg = 1;
1447 mod = (modrm >> 6) & 3;
1448 rm = modrm & 7;
1449
1450 if (s->aflag) {
1451
1452 havesib = 0;
1453 base = rm;
1454 index = 0;
1455 scale = 0;
1456
1457 if (base == 4) {
1458 havesib = 1;
1459 code = ldub_code(s->pc++);
1460 scale = (code >> 6) & 3;
1461 index = ((code >> 3) & 7) | REX_X(s);
1462 base = (code & 7);
1463 }
1464 base |= REX_B(s);
1465
1466 switch (mod) {
1467 case 0:
1468 if ((base & 7) == 5) {
1469 base = -1;
1470 disp = (int32_t)ldl_code(s->pc);
1471 s->pc += 4;
1472 if (CODE64(s) && !havesib) {
1473 disp += s->pc + s->rip_offset;
1474 }
1475 } else {
1476 disp = 0;
1477 }
1478 break;
1479 case 1:
1480 disp = (int8_t)ldub_code(s->pc++);
1481 break;
1482 default:
1483 case 2:
1484 disp = ldl_code(s->pc);
1485 s->pc += 4;
1486 break;
1487 }
1488
1489 if (base >= 0) {
1490 /* for correct popl handling with esp */
1491 if (base == 4 && s->popl_esp_hack)
1492 disp += s->popl_esp_hack;
1493 #ifdef TARGET_X86_64
1494 if (s->aflag == 2) {
1495 gen_op_movq_A0_reg(base);
1496 if (disp != 0) {
1497 gen_op_addq_A0_im(disp);
1498 }
1499 } else
1500 #endif
1501 {
1502 gen_op_movl_A0_reg(base);
1503 if (disp != 0)
1504 gen_op_addl_A0_im(disp);
1505 }
1506 } else {
1507 #ifdef TARGET_X86_64
1508 if (s->aflag == 2) {
1509 gen_op_movq_A0_im(disp);
1510 } else
1511 #endif
1512 {
1513 gen_op_movl_A0_im(disp);
1514 }
1515 }
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib && (index != 4 || scale != 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_addq_A0_reg_sN(scale, index);
1521 } else
1522 #endif
1523 {
1524 gen_op_addl_A0_reg_sN(scale, index);
1525 }
1526 }
1527 if (must_add_seg) {
1528 if (override < 0) {
1529 if (base == R_EBP || base == R_ESP)
1530 override = R_SS;
1531 else
1532 override = R_DS;
1533 }
1534 #ifdef TARGET_X86_64
1535 if (s->aflag == 2) {
1536 gen_op_addq_A0_seg(override);
1537 } else
1538 #endif
1539 {
1540 gen_op_addl_A0_seg(override);
1541 }
1542 }
1543 } else {
1544 switch (mod) {
1545 case 0:
1546 if (rm == 6) {
1547 disp = lduw_code(s->pc);
1548 s->pc += 2;
1549 gen_op_movl_A0_im(disp);
1550 rm = 0; /* avoid SS override */
1551 goto no_rm;
1552 } else {
1553 disp = 0;
1554 }
1555 break;
1556 case 1:
1557 disp = (int8_t)ldub_code(s->pc++);
1558 break;
1559 default:
1560 case 2:
1561 disp = lduw_code(s->pc);
1562 s->pc += 2;
1563 break;
1564 }
1565 switch(rm) {
1566 case 0:
1567 gen_op_movl_A0_reg(R_EBX);
1568 gen_op_addl_A0_reg_sN(0, R_ESI);
1569 break;
1570 case 1:
1571 gen_op_movl_A0_reg(R_EBX);
1572 gen_op_addl_A0_reg_sN(0, R_EDI);
1573 break;
1574 case 2:
1575 gen_op_movl_A0_reg(R_EBP);
1576 gen_op_addl_A0_reg_sN(0, R_ESI);
1577 break;
1578 case 3:
1579 gen_op_movl_A0_reg(R_EBP);
1580 gen_op_addl_A0_reg_sN(0, R_EDI);
1581 break;
1582 case 4:
1583 gen_op_movl_A0_reg(R_ESI);
1584 break;
1585 case 5:
1586 gen_op_movl_A0_reg(R_EDI);
1587 break;
1588 case 6:
1589 gen_op_movl_A0_reg(R_EBP);
1590 break;
1591 default:
1592 case 7:
1593 gen_op_movl_A0_reg(R_EBX);
1594 break;
1595 }
1596 if (disp != 0)
1597 gen_op_addl_A0_im(disp);
1598 gen_op_andl_A0_ffff();
1599 no_rm:
1600 if (must_add_seg) {
1601 if (override < 0) {
1602 if (rm == 2 || rm == 3 || rm == 6)
1603 override = R_SS;
1604 else
1605 override = R_DS;
1606 }
1607 gen_op_addl_A0_seg(override);
1608 }
1609 }
1610
1611 opreg = OR_A0;
1612 disp = 0;
1613 *reg_ptr = opreg;
1614 *offset_ptr = disp;
1615 }
1616
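/* decode and skip the memory operand of a ModRM byte without
   generating any code (used e.g. for multi-byte NOPs) */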
1617 static void gen_nop_modrm(DisasContext *s, int modrm)
1618 {
1619 int mod, rm, base, code;
1620
1621 mod = (modrm >> 6) & 3;
1622 if (mod == 3)
1623 return;
1624 rm = modrm & 7;
1625
1626 if (s->aflag) {
1627
1628 base = rm;
1629
1630 if (base == 4) {
1631 code = ldub_code(s->pc++);
1632 base = (code & 7);
1633 }
1634
1635 switch (mod) {
1636 case 0:
1637 if (base == 5) {
1638 s->pc += 4;
1639 }
1640 break;
1641 case 1:
1642 s->pc++;
1643 break;
1644 default:
1645 case 2:
1646 s->pc += 4;
1647 break;
1648 }
1649 } else {
1650 switch (mod) {
1651 case 0:
1652 if (rm == 6) {
1653 s->pc += 2;
1654 }
1655 break;
1656 case 1:
1657 s->pc++;
1658 break;
1659 default:
1660 case 2:
1661 s->pc += 2;
1662 break;
1663 }
1664 }
1665 }
1666
1667 /* used for LEA and MOV AX, mem */
1668 static void gen_add_A0_ds_seg(DisasContext *s)
1669 {
1670 int override, must_add_seg;
1671 must_add_seg = s->addseg;
1672 override = R_DS;
1673 if (s->override >= 0) {
1674 override = s->override;
1675 must_add_seg = 1;
1676 }
1679 if (must_add_seg) {
1680 #ifdef TARGET_X86_64
1681 if (CODE64(s)) {
1682 gen_op_addq_A0_seg(override);
1683 } else
1684 #endif
1685 {
1686 gen_op_addl_A0_seg(override);
1687 }
1688 }
1689 }
1690
1691 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1692 OR_TMP0 */
1693 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1694 {
1695 int mod, rm, opreg, disp;
1696
1697 mod = (modrm >> 6) & 3;
1698 rm = (modrm & 7) | REX_B(s);
1699 if (mod == 3) {
1700 if (is_store) {
1701 if (reg != OR_TMP0)
1702 gen_op_mov_TN_reg(ot, 0, reg);
1703 gen_op_mov_reg_T0(ot, rm);
1704 } else {
1705 gen_op_mov_TN_reg(ot, 0, rm);
1706 if (reg != OR_TMP0)
1707 gen_op_mov_reg_T0(ot, reg);
1708 }
1709 } else {
1710 gen_lea_modrm(s, modrm, &opreg, &disp);
1711 if (is_store) {
1712 if (reg != OR_TMP0)
1713 gen_op_mov_TN_reg(ot, 0, reg);
1714 gen_op_st_T0_A0(ot + s->mem_index);
1715 } else {
1716 gen_op_ld_T0_A0(ot + s->mem_index);
1717 if (reg != OR_TMP0)
1718 gen_op_mov_reg_T0(ot, reg);
1719 }
1720 }
1721 }
1722
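/* fetch an immediate operand of size 'ot' from the code stream and
   advance s->pc past it */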
1723 static inline uint32_t insn_get(DisasContext *s, int ot)
1724 {
1725 uint32_t ret;
1726
1727 switch(ot) {
1728 case OT_BYTE:
1729 ret = ldub_code(s->pc);
1730 s->pc++;
1731 break;
1732 case OT_WORD:
1733 ret = lduw_code(s->pc);
1734 s->pc += 2;
1735 break;
1736 default:
1737 case OT_LONG:
1738 ret = ldl_code(s->pc);
1739 s->pc += 4;
1740 break;
1741 }
1742 return ret;
1743 }
1744
1745 static inline int insn_const_size(unsigned int ot)
1746 {
1747 if (ot <= OT_LONG)
1748 return 1 << ot;
1749 else
1750 return 4;
1751 }
1752
1753 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1754 {
1755 TranslationBlock *tb;
1756 target_ulong pc;
1757
1758 pc = s->cs_base + eip;
1759 tb = s->tb;
1760 /* NOTE: we handle the case where the TB spans two pages here */
1761 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1762 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
1763 /* jump to same page: we can use a direct jump */
1764 tcg_gen_goto_tb(tb_num);
1765 gen_jmp_im(eip);
1766 tcg_gen_exit_tb((long)tb + tb_num);
1767 } else {
1768 /* jump to another page: currently not optimized */
1769 gen_jmp_im(eip);
1770 gen_eob(s);
1771 }
1772 }
1773
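/* generate a conditional jump to 'val' when the condition 'b' holds,
   otherwise to 'next_eip'. When block chaining is possible, the
   per-cc_op helpers avoid materializing the full EFLAGS value */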
1774 static inline void gen_jcc(DisasContext *s, int b,
1775 target_ulong val, target_ulong next_eip)
1776 {
1777 TranslationBlock *tb;
1778 int inv, jcc_op;
1779 GenOpFunc1 *func;
1780 target_ulong tmp;
1781 int l1, l2;
1782
1783 inv = b & 1;
1784 jcc_op = (b >> 1) & 7;
1785
1786 if (s->jmp_opt) {
1787 switch(s->cc_op) {
1788 /* we optimize the cmp/jcc case */
1789 case CC_OP_SUBB:
1790 case CC_OP_SUBW:
1791 case CC_OP_SUBL:
1792 case CC_OP_SUBQ:
1793 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1794 break;
1795
1796 /* some jumps are easy to compute */
1797 case CC_OP_ADDB:
1798 case CC_OP_ADDW:
1799 case CC_OP_ADDL:
1800 case CC_OP_ADDQ:
1801
1802 case CC_OP_ADCB:
1803 case CC_OP_ADCW:
1804 case CC_OP_ADCL:
1805 case CC_OP_ADCQ:
1806
1807 case CC_OP_SBBB:
1808 case CC_OP_SBBW:
1809 case CC_OP_SBBL:
1810 case CC_OP_SBBQ:
1811
1812 case CC_OP_LOGICB:
1813 case CC_OP_LOGICW:
1814 case CC_OP_LOGICL:
1815 case CC_OP_LOGICQ:
1816
1817 case CC_OP_INCB:
1818 case CC_OP_INCW:
1819 case CC_OP_INCL:
1820 case CC_OP_INCQ:
1821
1822 case CC_OP_DECB:
1823 case CC_OP_DECW:
1824 case CC_OP_DECL:
1825 case CC_OP_DECQ:
1826
1827 case CC_OP_SHLB:
1828 case CC_OP_SHLW:
1829 case CC_OP_SHLL:
1830 case CC_OP_SHLQ:
1831
1832 case CC_OP_SARB:
1833 case CC_OP_SARW:
1834 case CC_OP_SARL:
1835 case CC_OP_SARQ:
1836 switch(jcc_op) {
1837 case JCC_Z:
1838 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1839 break;
1840 case JCC_S:
1841 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1842 break;
1843 default:
1844 func = NULL;
1845 break;
1846 }
1847 break;
1848 default:
1849 func = NULL;
1850 break;
1851 }
1852
1853 if (s->cc_op != CC_OP_DYNAMIC) {
1854 gen_op_set_cc_op(s->cc_op);
1855 s->cc_op = CC_OP_DYNAMIC;
1856 }
1857
1858 if (!func) {
1859 gen_setcc_slow[jcc_op]();
1860 func = gen_op_jnz_T0_label;
1861 }
1862
1863 if (inv) {
1864 tmp = val;
1865 val = next_eip;
1866 next_eip = tmp;
1867 }
1868 tb = s->tb;
1869
1870 l1 = gen_new_label();
1871 func(l1);
1872
1873 gen_goto_tb(s, 0, next_eip);
1874
1875 gen_set_label(l1);
1876 gen_goto_tb(s, 1, val);
1877
1878 s->is_jmp = 3;
1879 } else {
1880
1881 if (s->cc_op != CC_OP_DYNAMIC) {
1882 gen_op_set_cc_op(s->cc_op);
1883 s->cc_op = CC_OP_DYNAMIC;
1884 }
1885 gen_setcc_slow[jcc_op]();
1886 if (inv) {
1887 tmp = val;
1888 val = next_eip;
1889 next_eip = tmp;
1890 }
1891 l1 = gen_new_label();
1892 l2 = gen_new_label();
1893 gen_op_jnz_T0_label(l1);
1894 gen_jmp_im(next_eip);
1895 gen_op_jmp_label(l2);
1896 gen_set_label(l1);
1897 gen_jmp_im(val);
1898 gen_set_label(l2);
1899 gen_eob(s);
1900 }
1901 }
1902
1903 static void gen_setcc(DisasContext *s, int b)
1904 {
1905 int inv, jcc_op;
1906 GenOpFunc *func;
1907
1908 inv = b & 1;
1909 jcc_op = (b >> 1) & 7;
1910 switch(s->cc_op) {
1911 /* we optimize the cmp/jcc case */
1912 case CC_OP_SUBB:
1913 case CC_OP_SUBW:
1914 case CC_OP_SUBL:
1915 case CC_OP_SUBQ:
1916 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1917 if (!func)
1918 goto slow_jcc;
1919 break;
1920
1921 /* some jumps are easy to compute */
1922 case CC_OP_ADDB:
1923 case CC_OP_ADDW:
1924 case CC_OP_ADDL:
1925 case CC_OP_ADDQ:
1926
1927 case CC_OP_LOGICB:
1928 case CC_OP_LOGICW:
1929 case CC_OP_LOGICL:
1930 case CC_OP_LOGICQ:
1931
1932 case CC_OP_INCB:
1933 case CC_OP_INCW:
1934 case CC_OP_INCL:
1935 case CC_OP_INCQ:
1936
1937 case CC_OP_DECB:
1938 case CC_OP_DECW:
1939 case CC_OP_DECL:
1940 case CC_OP_DECQ:
1941
1942 case CC_OP_SHLB:
1943 case CC_OP_SHLW:
1944 case CC_OP_SHLL:
1945 case CC_OP_SHLQ:
1946 switch(jcc_op) {
1947 case JCC_Z:
1948 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1949 break;
1950 case JCC_S:
1951 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1952 break;
1953 default:
1954 goto slow_jcc;
1955 }
1956 break;
1957 default:
1958 slow_jcc:
1959 if (s->cc_op != CC_OP_DYNAMIC)
1960 gen_op_set_cc_op(s->cc_op);
1961 func = gen_setcc_slow[jcc_op];
1962 break;
1963 }
1964 func();
1965 if (inv) {
1966 gen_op_xor_T0_1();
1967 }
1968 }
1969
1970 /* move T0 to seg_reg and compute if the CPU state may change. Never
1971 call this function with seg_reg == R_CS */
1972 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1973 {
1974 if (s->pe && !s->vm86) {
1975 /* XXX: optimize by finding processor state dynamically */
1976 if (s->cc_op != CC_OP_DYNAMIC)
1977 gen_op_set_cc_op(s->cc_op);
1978 gen_jmp_im(cur_eip);
1979 gen_op_movl_seg_T0(seg_reg);
1980 /* abort translation because the addseg value may change or
1981 because ss32 may change. For R_SS, translation must always
1982 stop as a special handling must be done to disable hardware
1983 interrupts for the next instruction */
1984 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1985 s->is_jmp = 3;
1986 } else {
1987 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1988 if (seg_reg == R_SS)
1989 s->is_jmp = 3;
1990 }
1991 }
1992
1993 #define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1994
1995 static inline int
1996 gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
1997 {
1998 #if !defined(CONFIG_USER_ONLY)
1999 if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
2000 if (s->cc_op != CC_OP_DYNAMIC)
2001 gen_op_set_cc_op(s->cc_op);
2002 SVM_movq_T1_im(s->pc - s->cs_base);
2003 gen_jmp_im(pc_start - s->cs_base);
2004 gen_op_geneflags();
2005 gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
2006 s->cc_op = CC_OP_DYNAMIC;
2007 /* FIXME: maybe we could move the io intercept vector to the TB as well
2008 so we know if this is an EOB or not ... let's assume it's not
2009 for now. */
2010 }
2011 #endif
2012 return 0;
2013 }
2014
2015 static inline int svm_is_rep(int prefixes)
2016 {
2017 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2018 }
2019
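/* emit an SVM intercept check for 'type'; returns 1 when the check
   unconditionally ends the translation block with a vmexit */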
2020 static inline int
2021 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2022 uint64_t type, uint64_t param)
2023 {
2024 if(!(s->flags & (INTERCEPT_SVM_MASK)))
2025 /* no SVM activated */
2026 return 0;
2027 switch(type) {
2028 /* CRx and DRx reads/writes */
2029 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2030 if (s->cc_op != CC_OP_DYNAMIC) {
2031 gen_op_set_cc_op(s->cc_op);
2032 s->cc_op = CC_OP_DYNAMIC;
2033 }
2034 gen_jmp_im(pc_start - s->cs_base);
2035 SVM_movq_T1_im(param);
2036 gen_op_geneflags();
2037 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2038 /* this is a special case as we do not know if the interception occurs
2039 so we assume there was none */
2040 return 0;
2041 case SVM_EXIT_MSR:
2042 if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2043 if (s->cc_op != CC_OP_DYNAMIC) {
2044 gen_op_set_cc_op(s->cc_op);
2045 s->cc_op = CC_OP_DYNAMIC;
2046 }
2047 gen_jmp_im(pc_start - s->cs_base);
2048 SVM_movq_T1_im(param);
2049 gen_op_geneflags();
2050 gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2051 /* this is a special case as we do not know if the interception occurs
2052 so we assume there was none */
2053 return 0;
2054 }
2055 break;
2056 default:
2057 if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2058 if (s->cc_op != CC_OP_DYNAMIC) {
2059 gen_op_set_cc_op(s->cc_op);
2060 s->cc_op = CC_OP_EFLAGS;
2061 }
2062 gen_jmp_im(pc_start - s->cs_base);
2063 SVM_movq_T1_im(param);
2064 gen_op_geneflags();
2065 gen_op_svm_vmexit(type >> 32, type);
2066 /* we can optimize this one so TBs don't get longer
2067 than up to vmexit */
2068 gen_eob(s);
2069 return 1;
2070 }
2071 }
2072 return 0;
2073 }
2074
2075 static inline int
2076 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2077 {
2078 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2079 }
2080
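/* adjust ESP by 'addend' using the stack width selected by
   CODE64/ss32 */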
2081 static inline void gen_stack_update(DisasContext *s, int addend)
2082 {
2083 #ifdef TARGET_X86_64
2084 if (CODE64(s)) {
2085 gen_op_addq_ESP_im(addend);
2086 } else
2087 #endif
2088 if (s->ss32) {
2089 gen_op_addl_ESP_im(addend);
2090 } else {
2091 gen_op_addw_ESP_im(addend);
2092 }
2093 }
2094
2095 /* generate a push. It depends on ss32, addseg and dflag */
2096 static void gen_push_T0(DisasContext *s)
2097 {
2098 #ifdef TARGET_X86_64
2099 if (CODE64(s)) {
2100 gen_op_movq_A0_reg(R_ESP);
2101 if (s->dflag) {
2102 gen_op_addq_A0_im(-8);
2103 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2104 } else {
2105 gen_op_addq_A0_im(-2);
2106 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2107 }
2108 gen_op_mov_reg_A0(2, R_ESP);
2109 } else
2110 #endif
2111 {
2112 gen_op_movl_A0_reg(R_ESP);
2113 if (!s->dflag)
2114 gen_op_addl_A0_im(-2);
2115 else
2116 gen_op_addl_A0_im(-4);
2117 if (s->ss32) {
2118 if (s->addseg) {
2119 gen_op_movl_T1_A0();
2120 gen_op_addl_A0_seg(R_SS);
2121 }
2122 } else {
2123 gen_op_andl_A0_ffff();
2124 gen_op_movl_T1_A0();
2125 gen_op_addl_A0_seg(R_SS);
2126 }
2127 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2128 if (s->ss32 && !s->addseg)
2129 gen_op_mov_reg_A0(1, R_ESP);
2130 else
2131 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2132 }
2133 }
2134
2135 /* generate a push. It depends on ss32, addseg and dflag */
2136 /* slower version for T1, only used for call Ev */
2137 static void gen_push_T1(DisasContext *s)
2138 {
2139 #ifdef TARGET_X86_64
2140 if (CODE64(s)) {
2141 gen_op_movq_A0_reg(R_ESP);
2142 if (s->dflag) {
2143 gen_op_addq_A0_im(-8);
2144 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2145 } else {
2146 gen_op_addq_A0_im(-2);
2147 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2148 }
2149 gen_op_mov_reg_A0(2, R_ESP);
2150 } else
2151 #endif
2152 {
2153 gen_op_movl_A0_reg(R_ESP);
2154 if (!s->dflag)
2155 gen_op_addl_A0_im(-2);
2156 else
2157 gen_op_addl_A0_im(-4);
2158 if (s->ss32) {
2159 if (s->addseg) {
2160 gen_op_addl_A0_seg(R_SS);
2161 }
2162 } else {
2163 gen_op_andl_A0_ffff();
2164 gen_op_addl_A0_seg(R_SS);
2165 }
2166 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2167
2168 if (s->ss32 && !s->addseg)
2169 gen_op_mov_reg_A0(1, R_ESP);
2170 else
2171 gen_stack_update(s, (-2) << s->dflag);
2172 }
2173 }
2174
2175 /* two step pop is necessary for precise exceptions */
2176 static void gen_pop_T0(DisasContext *s)
2177 {
2178 #ifdef TARGET_X86_64
2179 if (CODE64(s)) {
2180 gen_op_movq_A0_reg(R_ESP);
2181 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2182 } else
2183 #endif
2184 {
2185 gen_op_movl_A0_reg(R_ESP);
2186 if (s->ss32) {
2187 if (s->addseg)
2188 gen_op_addl_A0_seg(R_SS);
2189 } else {
2190 gen_op_andl_A0_ffff();
2191 gen_op_addl_A0_seg(R_SS);
2192 }
2193 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2194 }
2195 }
2196
2197 static void gen_pop_update(DisasContext *s)
2198 {
2199 #ifdef TARGET_X86_64
2200 if (CODE64(s) && s->dflag) {
2201 gen_stack_update(s, 8);
2202 } else
2203 #endif
2204 {
2205 gen_stack_update(s, 2 << s->dflag);
2206 }
2207 }
2208
2209 static void gen_stack_A0(DisasContext *s)
2210 {
2211 gen_op_movl_A0_reg(R_ESP);
2212 if (!s->ss32)
2213 gen_op_andl_A0_ffff();
2214 gen_op_movl_T1_A0();
2215 if (s->addseg)
2216 gen_op_addl_A0_seg(R_SS);
2217 }
2218
2219 /* NOTE: wrap around in 16 bit not fully handled */
2220 static void gen_pusha(DisasContext *s)
2221 {
2222 int i;
2223 gen_op_movl_A0_reg(R_ESP);
2224 gen_op_addl_A0_im(-16 << s->dflag);
2225 if (!s->ss32)
2226 gen_op_andl_A0_ffff();
2227 gen_op_movl_T1_A0();
2228 if (s->addseg)
2229 gen_op_addl_A0_seg(R_SS);
2230 for(i = 0;i < 8; i++) {
2231 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2232 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2233 gen_op_addl_A0_im(2 << s->dflag);
2234 }
2235 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2236 }
2237
2238 /* NOTE: wrap around in 16 bit not fully handled */
2239 static void gen_popa(DisasContext *s)
2240 {
2241 int i;
2242 gen_op_movl_A0_reg(R_ESP);
2243 if (!s->ss32)
2244 gen_op_andl_A0_ffff();
2245 gen_op_movl_T1_A0();
2246 gen_op_addl_T1_im(16 << s->dflag);
2247 if (s->addseg)
2248 gen_op_addl_A0_seg(R_SS);
2249 for(i = 0;i < 8; i++) {
2250 /* ESP is not reloaded */
2251 if (i != 3) {
2252 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2253 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2254 }
2255 gen_op_addl_A0_im(2 << s->dflag);
2256 }
2257 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2258 }
2259
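/* ENTER: push EBP, optionally copy 'level' enclosing frame pointers,
   then reserve esp_addend bytes of locals on the stack */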
2260 static void gen_enter(DisasContext *s, int esp_addend, int level)
2261 {
2262 int ot, opsize;
2263
2264 level &= 0x1f;
2265 #ifdef TARGET_X86_64
2266 if (CODE64(s)) {
2267 ot = s->dflag ? OT_QUAD : OT_WORD;
2268 opsize = 1 << ot;
2269
2270 gen_op_movl_A0_reg(R_ESP);
2271 gen_op_addq_A0_im(-opsize);
2272 gen_op_movl_T1_A0();
2273
2274 /* push bp */
2275 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2276 gen_op_st_T0_A0(ot + s->mem_index);
2277 if (level) {
2278 gen_op_enter64_level(level, (ot == OT_QUAD));
2279 }
2280 gen_op_mov_reg_T1(ot, R_EBP);
2281 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2282 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2283 } else
2284 #endif
2285 {
2286 ot = s->dflag + OT_WORD;
2287 opsize = 2 << s->dflag;
2288
2289 gen_op_movl_A0_reg(R_ESP);
2290 gen_op_addl_A0_im(-opsize);
2291 if (!s->ss32)
2292 gen_op_andl_A0_ffff();
2293 gen_op_movl_T1_A0();
2294 if (s->addseg)
2295 gen_op_addl_A0_seg(R_SS);
2296 /* push bp */
2297 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2298 gen_op_st_T0_A0(ot + s->mem_index);
2299 if (level) {
2300 gen_op_enter_level(level, s->dflag);
2301 }
2302 gen_op_mov_reg_T1(ot, R_EBP);
2303 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2304 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2305 }
2306 }
2307
2308 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2309 {
2310 if (s->cc_op != CC_OP_DYNAMIC)
2311 gen_op_set_cc_op(s->cc_op);
2312 gen_jmp_im(cur_eip);
2313 gen_op_raise_exception(trapno);
2314 s->is_jmp = 3;
2315 }
2316
2317 /* an interrupt is different from an exception because of the
2318 privilege checks */
2319 static void gen_interrupt(DisasContext *s, int intno,
2320 target_ulong cur_eip, target_ulong next_eip)
2321 {
2322 if (s->cc_op != CC_OP_DYNAMIC)
2323 gen_op_set_cc_op(s->cc_op);
2324 gen_jmp_im(cur_eip);
2325 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
2326 s->is_jmp = 3;
2327 }
2328
2329 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2330 {
2331 if (s->cc_op != CC_OP_DYNAMIC)
2332 gen_op_set_cc_op(s->cc_op);
2333 gen_jmp_im(cur_eip);
2334 gen_op_debug();
2335 s->is_jmp = 3;
2336 }
2337
2338 /* generate a generic end of block. Trace exception is also generated
2339 if needed */
2340 static void gen_eob(DisasContext *s)
2341 {
2342 if (s->cc_op != CC_OP_DYNAMIC)
2343 gen_op_set_cc_op(s->cc_op);
2344 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2345 gen_op_reset_inhibit_irq();
2346 }
2347 if (s->singlestep_enabled) {
2348 gen_op_debug();
2349 } else if (s->tf) {
2350 gen_op_single_step();
2351 } else {
2352 tcg_gen_exit_tb(0);
2353 }
2354 s->is_jmp = 3;
2355 }
2356
2357 /* generate a jump to eip. No segment change may happen before this
2358 point, since the block may be chained directly to the next one */
2359 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2360 {
2361 if (s->jmp_opt) {
2362 if (s->cc_op != CC_OP_DYNAMIC) {
2363 gen_op_set_cc_op(s->cc_op);
2364 s->cc_op = CC_OP_DYNAMIC;
2365 }
2366 gen_goto_tb(s, tb_num, eip);
2367 s->is_jmp = 3;
2368 } else {
2369 gen_jmp_im(eip);
2370 gen_eob(s);
2371 }
2372 }
2373
2374 static void gen_jmp(DisasContext *s, target_ulong eip)
2375 {
2376 gen_jmp_tb(s, eip, 0);
2377 }
2378
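/* In the helpers below, idx is s->mem_index, still in the old
   micro-op encoding; (idx >> 2) - 1 recovers the raw memory index
   expected by tcg_gen_qemu_ld64/st64. */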
2379 static inline void gen_ldq_env_A0(int idx, int offset)
2380 {
2381 int mem_index = (idx >> 2) - 1;
2382 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2383 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
2384 }
2385
2386 static inline void gen_stq_env_A0(int idx, int offset)
2387 {
2388 int mem_index = (idx >> 2) - 1;
2389 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
2390 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2391 }
2392
2393 static inline void gen_ldo_env_A0(int idx, int offset)
2394 {
2395 int mem_index = (idx >> 2) - 1;
2396 tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2397 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2398 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2399 tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
2400 tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2401 }
2402
2403 static inline void gen_sto_env_A0(int idx, int offset)
2404 {
2405 int mem_index = (idx >> 2) - 1;
2406 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2407 tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2408 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2409 tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2410 tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
2411 }
2412
2413 static inline void gen_op_movo(int d_offset, int s_offset)
2414 {
2415 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2416 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2417 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
2418 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
2419 }
2420
2421 static inline void gen_op_movq(int d_offset, int s_offset)
2422 {
2423 tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2424 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2425 }
2426
2427 static inline void gen_op_movl(int d_offset, int s_offset)
2428 {
2429 tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
2430 tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
2431 }
2432
2433 static inline void gen_op_movq_env_0(int d_offset)
2434 {
2435 tcg_gen_movi_i64(cpu_tmp1, 0);
2436 tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2437 }
2438
2439 #define SSE_SPECIAL ((void *)1)
2440 #define SSE_DUMMY ((void *)2)
2441
2442 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2443 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2444 helper_ ## x ## ss, helper_ ## x ## sd, }
2445
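/* sse_op_table1 is indexed by the second opcode byte and the mandatory
   prefix (b1): column 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2.
   E.g. row 0x58 maps 0F 58 to addps, 66 0F 58 to addpd, F3 0F 58 to
   addss and F2 0F 58 to addsd. SSE_SPECIAL entries are decoded by hand
   in gen_sse(); SSE_DUMMY is a non NULL placeholder for opcodes
   (femms, emms, the 3DNow! group) that are dispatched elsewhere. */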
2446 static void *sse_op_table1[256][4] = {
2447 /* 3DNow! extensions */
2448 [0x0e] = { SSE_DUMMY }, /* femms */
2449 [0x0f] = { SSE_DUMMY }, /* pf... */
2450 /* pure SSE operations */
2451 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2452 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2453 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2454 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2455 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2456 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2457 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2458 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2459
2460 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2461 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2462 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2463 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2464 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
2465 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
2466 [0x2e] = { helper_ucomiss, helper_ucomisd },
2467 [0x2f] = { helper_comiss, helper_comisd },
2468 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2469 [0x51] = SSE_FOP(sqrt),
2470 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2471 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2472 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2473 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2474 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2475 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2476 [0x58] = SSE_FOP(add),
2477 [0x59] = SSE_FOP(mul),
2478 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2479 helper_cvtss2sd, helper_cvtsd2ss },
2480 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2481 [0x5c] = SSE_FOP(sub),
2482 [0x5d] = SSE_FOP(min),
2483 [0x5e] = SSE_FOP(div),
2484 [0x5f] = SSE_FOP(max),
2485
2486 [0xc2] = SSE_FOP(cmpeq),
2487 [0xc6] = { helper_shufps, helper_shufpd },
2488
2489 /* MMX ops and their SSE extensions */
2490 [0x60] = MMX_OP2(punpcklbw),
2491 [0x61] = MMX_OP2(punpcklwd),
2492 [0x62] = MMX_OP2(punpckldq),
2493 [0x63] = MMX_OP2(packsswb),
2494 [0x64] = MMX_OP2(pcmpgtb),
2495 [0x65] = MMX_OP2(pcmpgtw),
2496 [0x66] = MMX_OP2(pcmpgtl),
2497 [0x67] = MMX_OP2(packuswb),
2498 [0x68] = MMX_OP2(punpckhbw),
2499 [0x69] = MMX_OP2(punpckhwd),
2500 [0x6a] = MMX_OP2(punpckhdq),
2501 [0x6b] = MMX_OP2(packssdw),
2502 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2503 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2504 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2505 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2506 [0x70] = { helper_pshufw_mmx,
2507 helper_pshufd_xmm,
2508 helper_pshufhw_xmm,
2509 helper_pshuflw_xmm },
2510 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2511 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2512 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2513 [0x74] = MMX_OP2(pcmpeqb),
2514 [0x75] = MMX_OP2(pcmpeqw),
2515 [0x76] = MMX_OP2(pcmpeql),
2516 [0x77] = { SSE_DUMMY }, /* emms */
2517 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2518 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2519 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2520 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2521 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2522 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2523 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2524 [0xd1] = MMX_OP2(psrlw),
2525 [0xd2] = MMX_OP2(psrld),
2526 [0xd3] = MMX_OP2(psrlq),
2527 [0xd4] = MMX_OP2(paddq),
2528 [0xd5] = MMX_OP2(pmullw),
2529 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq ea, xmm; movq2dq; movdq2q */
2530 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2531 [0xd8] = MMX_OP2(psubusb),
2532 [0xd9] = MMX_OP2(psubusw),
2533 [0xda] = MMX_OP2(pminub),
2534 [0xdb] = MMX_OP2(pand),
2535 [0xdc] = MMX_OP2(paddusb),
2536 [0xdd] = MMX_OP2(paddusw),
2537 [0xde] = MMX_OP2(pmaxub),
2538 [0xdf] = MMX_OP2(pandn),
2539 [0xe0] = MMX_OP2(pavgb),
2540 [0xe1] = MMX_OP2(psraw),
2541 [0xe2] = MMX_OP2(psrad),
2542 [0xe3] = MMX_OP2(pavgw),
2543 [0xe4] = MMX_OP2(pmulhuw),
2544 [0xe5] = MMX_OP2(pmulhw),
2545 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2546 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2547 [0xe8] = MMX_OP2(psubsb),
2548 [0xe9] = MMX_OP2(psubsw),
2549 [0xea] = MMX_OP2(pminsw),
2550 [0xeb] = MMX_OP2(por),
2551 [0xec] = MMX_OP2(paddsb),
2552 [0xed] = MMX_OP2(paddsw),
2553 [0xee] = MMX_OP2(pmaxsw),
2554 [0xef] = MMX_OP2(pxor),
2555 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2556 [0xf1] = MMX_OP2(psllw),
2557 [0xf2] = MMX_OP2(pslld),
2558 [0xf3] = MMX_OP2(psllq),
2559 [0xf4] = MMX_OP2(pmuludq),
2560 [0xf5] = MMX_OP2(pmaddwd),
2561 [0xf6] = MMX_OP2(psadbw),
2562 [0xf7] = MMX_OP2(maskmov),
2563 [0xf8] = MMX_OP2(psubb),
2564 [0xf9] = MMX_OP2(psubw),
2565 [0xfa] = MMX_OP2(psubl),
2566 [0xfb] = MMX_OP2(psubq),
2567 [0xfc] = MMX_OP2(paddb),
2568 [0xfd] = MMX_OP2(paddw),
2569 [0xfe] = MMX_OP2(paddl),
2570 };
2571
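/* sse_op_table2 serves the immediate-form shifts (0F 71/72/73): the
   index is 8 * (opcode - 0x71) + the modrm reg field, column 0 = MMX,
   1 = XMM. E.g. 66 0F 73 /7 ib selects helper_pslldq_xmm. */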
2572 static void *sse_op_table2[3 * 8][2] = {
2573 [0 + 2] = MMX_OP2(psrlw),
2574 [0 + 4] = MMX_OP2(psraw),
2575 [0 + 6] = MMX_OP2(psllw),
2576 [8 + 2] = MMX_OP2(psrld),
2577 [8 + 4] = MMX_OP2(psrad),
2578 [8 + 6] = MMX_OP2(pslld),
2579 [16 + 2] = MMX_OP2(psrlq),
2580 [16 + 3] = { NULL, helper_psrldq_xmm },
2581 [16 + 6] = MMX_OP2(psllq),
2582 [16 + 7] = { NULL, helper_pslldq_xmm },
2583 };
2584
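/* sse_op_table3 holds the scalar int<->float converters in rows of
   four: { 32 bit ss, 32 bit sd, 64 bit ss, 64 bit sd } for cvtsi2*,
   then cvtt*2si, then cvt*2si; the 64 bit entries exist only on
   x86_64 (X86_64_ONLY expands to NULL elsewhere). */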
2585 static void *sse_op_table3[4 * 3] = {
2586 helper_cvtsi2ss,
2587 helper_cvtsi2sd,
2588 X86_64_ONLY(helper_cvtsq2ss),
2589 X86_64_ONLY(helper_cvtsq2sd),
2590
2591 helper_cvttss2si,
2592 helper_cvttsd2si,
2593 X86_64_ONLY(helper_cvttss2sq),
2594 X86_64_ONLY(helper_cvttsd2sq),
2595
2596 helper_cvtss2si,
2597 helper_cvtsd2si,
2598 X86_64_ONLY(helper_cvtss2sq),
2599 X86_64_ONLY(helper_cvtsd2sq),
2600 };
2601
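/* sse_op_table4 maps the imm8 of cmpps/cmppd/cmpss/cmpsd (0F C2) to
   the comparison predicate: 0 = eq, 1 = lt, 2 = le, 3 = unord,
   4 = neq, 5 = nlt, 6 = nle, 7 = ord. */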
2602 static void *sse_op_table4[8][4] = {
2603 SSE_FOP(cmpeq),
2604 SSE_FOP(cmplt),
2605 SSE_FOP(cmple),
2606 SSE_FOP(cmpunord),
2607 SSE_FOP(cmpneq),
2608 SSE_FOP(cmpnlt),
2609 SSE_FOP(cmpnle),
2610 SSE_FOP(cmpord),
2611 };
2612
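/* sse_op_table5 is indexed by the 3DNow! suffix, i.e. the imm8 that
   follows the operands of a 0F 0F insn. */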
2613 static void *sse_op_table5[256] = {
2614 [0x0c] = helper_pi2fw,
2615 [0x0d] = helper_pi2fd,
2616 [0x1c] = helper_pf2iw,
2617 [0x1d] = helper_pf2id,
2618 [0x8a] = helper_pfnacc,
2619 [0x8e] = helper_pfpnacc,
2620 [0x90] = helper_pfcmpge,
2621 [0x94] = helper_pfmin,
2622 [0x96] = helper_pfrcp,
2623 [0x97] = helper_pfrsqrt,
2624 [0x9a] = helper_pfsub,
2625 [0x9e] = helper_pfadd,
2626 [0xa0] = helper_pfcmpgt,
2627 [0xa4] = helper_pfmax,
2628 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2629 [0xa7] = helper_movq, /* pfrsqit1 */
2630 [0xaa] = helper_pfsubr,
2631 [0xae] = helper_pfacc,
2632 [0xb0] = helper_pfcmpeq,
2633 [0xb4] = helper_pfmul,
2634 [0xb6] = helper_movq, /* pfrcpit2 */
2635 [0xb7] = helper_pmulhrw_mmx,
2636 [0xbb] = helper_pswapd,
2637 [0xbf] = helper_pavgb_mmx /* pavgusb */
2638 };
2639
2640 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2641 {
2642 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2643 int modrm, mod, rm, reg, reg_addr, offset_addr;
2644 void *sse_op2;
2645
2646 b &= 0xff;
2647 if (s->prefix & PREFIX_DATA)
2648 b1 = 1;
2649 else if (s->prefix & PREFIX_REPZ)
2650 b1 = 2;
2651 else if (s->prefix & PREFIX_REPNZ)
2652 b1 = 3;
2653 else
2654 b1 = 0;
2655 sse_op2 = sse_op_table1[b][b1];
2656 if (!sse_op2)
2657 goto illegal_op;
2658 if ((b >= 0x10 && b <= 0x5f) || b == 0xc6 || b == 0xc2) {
2659 is_xmm = 1;
2660 } else {
2661 if (b1 == 0) {
2662 /* MMX case */
2663 is_xmm = 0;
2664 } else {
2665 is_xmm = 1;
2666 }
2667 }
2668 /* simple MMX/SSE operation */
2669 if (s->flags & HF_TS_MASK) {
2670 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2671 return;
2672 }
2673 if (s->flags & HF_EM_MASK) {
2674 illegal_op:
2675 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2676 return;
2677 }
2678 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2679 goto illegal_op;
2680 if (b == 0x0e) {
2681 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2682 goto illegal_op;
2683 /* femms */
2684 tcg_gen_helper_0_0(helper_emms);
2685 return;
2686 }
2687 if (b == 0x77) {
2688 /* emms */
2689 tcg_gen_helper_0_0(helper_emms);
2690 return;
2691 }
2692 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2693 the static cpu state) */
2694 if (!is_xmm) {
2695 tcg_gen_helper_0_0(helper_enter_mmx);
2696 }
2697
2698 modrm = ldub_code(s->pc++);
2699 reg = ((modrm >> 3) & 7);
2700 if (is_xmm)
2701 reg |= rex_r;
2702 mod = (modrm >> 6) & 3;
2703 if (sse_op2 == SSE_SPECIAL) {
2704 b |= (b1 << 8);
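/* modrm is mod(7:6) reg(5:3) rm(2:0). The prefix index b1 is folded
   into bits 8-9 of b, so the cases below read <prefix><opcode>:
   0x0e7 = 0F E7 (movntq), 0x1e7 = 66 0F E7 (movntdq), 0x2.. = F3
   forms and 0x3.. = F2 forms. */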
2705 switch(b) {
2706 case 0x0e7: /* movntq */
2707 if (mod == 3)
2708 goto illegal_op;
2709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2710 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2711 break;
2712 case 0x1e7: /* movntdq */
2713 case 0x02b: /* movntps */
2714 case 0x12b: /* movntpd */
2715 case 0x3f0: /* lddqu */
2716 if (mod == 3)
2717 goto illegal_op;
2718 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2719 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2720 break;
2721 case 0x6e: /* movd mm, ea */
2722 #ifdef TARGET_X86_64
2723 if (s->dflag == 2) {
2724 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2725 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2726 } else
2727 #endif
2728 {
2729 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2730 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2731 offsetof(CPUX86State,fpregs[reg].mmx));
2732 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2733 }
2734 break;
2735 case 0x16e: /* movd xmm, ea */
2736 #ifdef TARGET_X86_64
2737 if (s->dflag == 2) {
2738 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2739 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2740 offsetof(CPUX86State,xmm_regs[reg]));
2741 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2742 } else
2743 #endif
2744 {
2745 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2746 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2747 offsetof(CPUX86State,xmm_regs[reg]));
2748 tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
2749 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
2750 }
2751 break;
2752 case 0x6f: /* movq mm, ea */
2753 if (mod != 3) {
2754 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2755 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2756 } else {
2757 rm = (modrm & 7);
2758 tcg_gen_ld_i64(cpu_tmp1, cpu_env,
2759 offsetof(CPUX86State,fpregs[rm].mmx));
2760 tcg_gen_st_i64(cpu_tmp1, cpu_env,
2761 offsetof(CPUX86State,fpregs[reg].mmx));
2762 }
2763 break;
2764 case 0x010: /* movups */
2765 case 0x110: /* movupd */
2766 case 0x028: /* movaps */
2767 case 0x128: /* movapd */
2768 case 0x16f: /* movdqa xmm, ea */
2769 case 0x26f: /* movdqu xmm, ea */
2770 if (mod != 3) {
2771 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2772 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2773 } else {
2774 rm = (modrm & 7) | REX_B(s);
2775 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2776 offsetof(CPUX86State,xmm_regs[rm]));
2777 }
2778 break;
2779 case 0x210: /* movss xmm, ea */
2780 if (mod != 3) {
2781 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2782 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2783 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2784 gen_op_movl_T0_0();
2785 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2786 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2787 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2788 } else {
2789 rm = (modrm & 7) | REX_B(s);
2790 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2791 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2792 }
2793 break;
2794 case 0x310: /* movsd xmm, ea */
2795 if (mod != 3) {
2796 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2797 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2798 gen_op_movl_T0_0();
2799 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2800 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2801 } else {
2802 rm = (modrm & 7) | REX_B(s);
2803 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2804 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2805 }
2806 break;
2807 case 0x012: /* movlps */
2808 case 0x112: /* movlpd */
2809 if (mod != 3) {
2810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2811 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2812 } else {
2813 /* movhlps */
2814 rm = (modrm & 7) | REX_B(s);
2815 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2816 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2817 }
2818 break;
2819 case 0x212: /* movsldup */
2820 if (mod != 3) {
2821 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2822 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2823 } else {
2824 rm = (modrm & 7) | REX_B(s);
2825 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2826 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2827 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2828 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2829 }
2830 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2831 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2832 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2833 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2834 break;
2835 case 0x312: /* movddup */
2836 if (mod != 3) {
2837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2838 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2839 } else {
2840 rm = (modrm & 7) | REX_B(s);
2841 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2842 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2843 }
2844 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2845 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2846 break;
2847 case 0x016: /* movhps */
2848 case 0x116: /* movhpd */
2849 if (mod != 3) {
2850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2851 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2852 } else {
2853 /* movlhps */
2854 rm = (modrm & 7) | REX_B(s);
2855 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2856 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2857 }
2858 break;
2859 case 0x216: /* movshdup */
2860 if (mod != 3) {
2861 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2862 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2863 } else {
2864 rm = (modrm & 7) | REX_B(s);
2865 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2866 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2867 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2868 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2869 }
2870 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2871 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2872 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2873 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2874 break;
2875 case 0x7e: /* movd ea, mm */
2876 #ifdef TARGET_X86_64
2877 if (s->dflag == 2) {
2878 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2879 offsetof(CPUX86State,fpregs[reg].mmx));
2880 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2881 } else
2882 #endif
2883 {
2884 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2885 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2886 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2887 }
2888 break;
2889 case 0x17e: /* movd ea, xmm */
2890 #ifdef TARGET_X86_64
2891 if (s->dflag == 2) {
2892 tcg_gen_ld_i64(cpu_T[0], cpu_env,
2893 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2894 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2895 } else
2896 #endif
2897 {
2898 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2899 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2900 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2901 }
2902 break;
2903 case 0x27e: /* movq xmm, ea */
2904 if (mod != 3) {
2905 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2906 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2907 } else {
2908 rm = (modrm & 7) | REX_B(s);
2909 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2910 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2911 }
2912 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2913 break;
2914 case 0x7f: /* movq ea, mm */
2915 if (mod != 3) {
2916 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2917 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2918 } else {
2919 rm = (modrm & 7);
2920 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2921 offsetof(CPUX86State,fpregs[reg].mmx));
2922 }
2923 break;
2924 case 0x011: /* movups */
2925 case 0x111: /* movupd */
2926 case 0x029: /* movaps */
2927 case 0x129: /* movapd */
2928 case 0x17f: /* movdqa ea, xmm */
2929 case 0x27f: /* movdqu ea, xmm */
2930 if (mod != 3) {
2931 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2932 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2933 } else {
2934 rm = (modrm & 7) | REX_B(s);
2935 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2936 offsetof(CPUX86State,xmm_regs[reg]));
2937 }
2938 break;
2939 case 0x211: /* movss ea, xmm */
2940 if (mod != 3) {
2941 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2942 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2943 gen_op_st_T0_A0(OT_LONG + s->mem_index);
2944 } else {
2945 rm = (modrm & 7) | REX_B(s);
2946 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2947 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2948 }
2949 break;
2950 case 0x311: /* movsd ea, xmm */
2951 if (mod != 3) {
2952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2953 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2954 } else {
2955 rm = (modrm & 7) | REX_B(s);
2956 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2957 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2958 }
2959 break;
2960 case 0x013: /* movlps */
2961 case 0x113: /* movlpd */
2962 if (mod != 3) {
2963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2964 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2965 } else {
2966 goto illegal_op;
2967 }
2968 break;
2969 case 0x017: /* movhps */
2970 case 0x117: /* movhpd */
2971 if (mod != 3) {
2972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2973 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2974 } else {
2975 goto illegal_op;
2976 }
2977 break;
2978 case 0x71: /* shift mm, im */
2979 case 0x72:
2980 case 0x73:
2981 case 0x171: /* shift xmm, im */
2982 case 0x172:
2983 case 0x173:
2984 val = ldub_code(s->pc++);
2985 if (is_xmm) {
2986 gen_op_movl_T0_im(val);
2987 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2988 gen_op_movl_T0_0();
2989 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2990 op1_offset = offsetof(CPUX86State,xmm_t0);
2991 } else {
2992 gen_op_movl_T0_im(val);
2993 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2994 gen_op_movl_T0_0();
2995 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2996 op1_offset = offsetof(CPUX86State,mmx_t0);
2997 }
2998 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
2999 if (!sse_op2)
3000 goto illegal_op;
3001 if (is_xmm) {
3002 rm = (modrm & 7) | REX_B(s);
3003 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3004 } else {
3005 rm = (modrm & 7);
3006 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3007 }
3008 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3009 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3010 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3011 break;
3012 case 0x050: /* movmskps */
3013 rm = (modrm & 7) | REX_B(s);
3014 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3015 offsetof(CPUX86State,xmm_regs[rm]));
3016 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3017 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3018 gen_op_mov_reg_T0(OT_LONG, reg);
3019 break;
3020 case 0x150: /* movmskpd */
3021 rm = (modrm & 7) | REX_B(s);
3022 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3023 offsetof(CPUX86State,xmm_regs[rm]));
3024 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3025 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3026 gen_op_mov_reg_T0(OT_LONG, reg);
3027 break;
3028 case 0x02a: /* cvtpi2ps */
3029 case 0x12a: /* cvtpi2pd */
3030 tcg_gen_helper_0_0(helper_enter_mmx);
3031 if (mod != 3) {
3032 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3033 op2_offset = offsetof(CPUX86State,mmx_t0);
3034 gen_ldq_env_A0(s->mem_index, op2_offset);
3035 } else {
3036 rm = (modrm & 7);
3037 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3038 }
3039 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3040 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3041 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3042 switch(b >> 8) {
3043 case 0x0:
3044 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3045 break;
3046 default:
3047 case 0x1:
3048 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3049 break;
3050 }
3051 break;
3052 case 0x22a: /* cvtsi2ss */
3053 case 0x32a: /* cvtsi2sd */
3054 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3055 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3056 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3057 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3058 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
if (ot == OT_LONG) {
tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
} else {
/* cvtsq2ss/cvtsq2sd: the source is 64 bit, do not truncate */
tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
}
3061 break;
3062 case 0x02c: /* cvttps2pi */
3063 case 0x12c: /* cvttpd2pi */
3064 case 0x02d: /* cvtps2pi */
3065 case 0x12d: /* cvtpd2pi */
3066 tcg_gen_helper_0_0(helper_enter_mmx);
3067 if (mod != 3) {
3068 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3069 op2_offset = offsetof(CPUX86State,xmm_t0);
3070 gen_ldo_env_A0(s->mem_index, op2_offset);
3071 } else {
3072 rm = (modrm & 7) | REX_B(s);
3073 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3074 }
3075 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3076 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3077 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3078 switch(b) {
3079 case 0x02c:
3080 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3081 break;
3082 case 0x12c:
3083 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3084 break;
3085 case 0x02d:
3086 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3087 break;
3088 case 0x12d:
3089 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3090 break;
3091 }
3092 break;
3093 case 0x22c: /* cvttss2si */
3094 case 0x32c: /* cvttsd2si */
3095 case 0x22d: /* cvtss2si */
3096 case 0x32d: /* cvtsd2si */
3097 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3098 if (mod != 3) {
3099 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3100 if ((b >> 8) & 1) {
3101 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3102 } else {
3103 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3104 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3105 }
3106 op2_offset = offsetof(CPUX86State,xmm_t0);
3107 } else {
3108 rm = (modrm & 7) | REX_B(s);
3109 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3110 }
3111 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3112 (b & 1) * 4];
3113 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3114 if (ot == OT_LONG) {
3115 tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3116 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3117 } else {
3118 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3119 }
3120 gen_op_mov_reg_T0(ot, reg);
3121 break;
3122 case 0xc4: /* pinsrw */
3123 case 0x1c4:
3124 s->rip_offset = 1;
3125 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3126 val = ldub_code(s->pc++);
3127 if (b1) {
3128 val &= 7;
3129 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3130 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3131 } else {
3132 val &= 3;
3133 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3134 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3135 }
3136 break;
3137 case 0xc5: /* pextrw */
3138 case 0x1c5:
3139 if (mod != 3)
3140 goto illegal_op;
3141 val = ldub_code(s->pc++);
3142 if (b1) {
3143 val &= 7;
3144 rm = (modrm & 7) | REX_B(s);
3145 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3146 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3147 } else {
3148 val &= 3;
3149 rm = (modrm & 7);
3150 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3151 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3152 }
3153 reg = ((modrm >> 3) & 7) | rex_r;
3154 gen_op_mov_reg_T0(OT_LONG, reg);
3155 break;
3156 case 0x1d6: /* movq ea, xmm */
3157 if (mod != 3) {
3158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3159 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3160 } else {
3161 rm = (modrm & 7) | REX_B(s);
3162 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3163 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3164 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3165 }
3166 break;
3167 case 0x2d6: /* movq2dq */
3168 tcg_gen_helper_0_0(helper_enter_mmx);
3169 rm = (modrm & 7);
3170 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3171 offsetof(CPUX86State,fpregs[rm].mmx));
3172 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3173 break;
3174 case 0x3d6: /* movdq2q */
3175 tcg_gen_helper_0_0(helper_enter_mmx);
3176 rm = (modrm & 7) | REX_B(s);
3177 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3178 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3179 break;
3180 case 0xd7: /* pmovmskb */
3181 case 0x1d7:
3182 if (mod != 3)
3183 goto illegal_op;
3184 if (b1) {
3185 rm = (modrm & 7) | REX_B(s);
3186 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3187 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3188 } else {
3189 rm = (modrm & 7);
3190 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3191 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3192 }
3193 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3194 reg = ((modrm >> 3) & 7) | rex_r;
3195 gen_op_mov_reg_T0(OT_LONG, reg);
3196 break;
3197 default:
3198 goto illegal_op;
3199 }
3200 } else {
3201 /* generic MMX or SSE operation */
3202 switch(b) {
3203 case 0xf7:
3204 /* maskmov: we must prepare A0 with the implicit DS:rDI destination */
3205 if (mod != 3)
3206 goto illegal_op;
3207 #ifdef TARGET_X86_64
3208 if (s->aflag == 2) {
3209 gen_op_movq_A0_reg(R_EDI);
3210 } else
3211 #endif
3212 {
3213 gen_op_movl_A0_reg(R_EDI);
3214 if (s->aflag == 0)
3215 gen_op_andl_A0_ffff();
3216 }
3217 gen_add_A0_ds_seg(s);
3218 break;
3219 case 0x70: /* pshufx insn */
3220 case 0xc6: /* shufps, shufpd */
3221 case 0xc2: /* compare insns */
3222 s->rip_offset = 1;
3223 break;
3224 default:
3225 break;
3226 }
3227 if (is_xmm) {
3228 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3229 if (mod != 3) {
3230 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3231 op2_offset = offsetof(CPUX86State,xmm_t0);
3232 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3233 b == 0xc2)) {
3234 /* specific case for SSE single instructions */
3235 if (b1 == 2) {
3236 /* 32 bit access */
3237 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3238 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3239 } else {
3240 /* 64 bit access */
3241 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3242 }
3243 } else {
3244 gen_ldo_env_A0(s->mem_index, op2_offset);
3245 }
3246 } else {
3247 rm = (modrm & 7) | REX_B(s);
3248 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3249 }
3250 } else {
3251 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3252 if (mod != 3) {
3253 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3254 op2_offset = offsetof(CPUX86State,mmx_t0);
3255 gen_ldq_env_A0(s->mem_index, op2_offset);
3256 } else {
3257 rm = (modrm & 7);
3258 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3259 }
3260 }
3261 switch(b) {
3262 case 0x0f: /* 3DNow! data insns */
3263 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3264 goto illegal_op;
3265 val = ldub_code(s->pc++);
3266 sse_op2 = sse_op_table5[val];
3267 if (!sse_op2)
3268 goto illegal_op;
3269 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3270 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3271 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3272 break;
3273 case 0x70: /* pshufx insn */
3274 case 0xc6: /* shufps, shufpd */
3275 val = ldub_code(s->pc++);
3276 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3277 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3278 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3279 break;
3280 case 0xc2:
3281 /* compare insns */
3282 val = ldub_code(s->pc++);
3283 if (val >= 8)
3284 goto illegal_op;
3285 sse_op2 = sse_op_table4[val][b1];
3286 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3287 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3288 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3289 break;
3290 default:
3291 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3292 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3293 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3294 break;
3295 }
3296 if (b == 0x2e || b == 0x2f) {
3297 /* just to keep the EFLAGS optimization correct */
3298 gen_op_com_dummy();
3299 s->cc_op = CC_OP_EFLAGS;
3300 }
3301 }
3302 }
3303
3304
3305 /* convert one instruction. s->is_jmp is set if the translation must
3306 be stopped. Return the next pc value */
3307 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3308 {
3309 int b, prefixes, aflag, dflag;
3310 int shift, ot;
3311 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3312 target_ulong next_eip, tval;
3313 int rex_w, rex_r;
3314
3315 s->pc = pc_start;
3316 prefixes = 0;
3317 aflag = s->code32;
3318 dflag = s->code32;
3319 s->override = -1;
3320 rex_w = -1;
3321 rex_r = 0;
3322 #ifdef TARGET_X86_64
3323 s->rex_x = 0;
3324 s->rex_b = 0;
3325 x86_64_hregs = 0;
3326 #endif
3327 s->rip_offset = 0; /* for relative ip address */
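/* rip_offset counts the immediate bytes that still follow the
   modrm/displacement, so a RIP relative displacement can be taken
   from the end of the insn rather than from the current s->pc. */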
3328 next_byte:
3329 b = ldub_code(s->pc);
3330 s->pc++;
3331 /* check prefixes */
3332 #ifdef TARGET_X86_64
3333 if (CODE64(s)) {
3334 switch (b) {
3335 case 0xf3:
3336 prefixes |= PREFIX_REPZ;
3337 goto next_byte;
3338 case 0xf2:
3339 prefixes |= PREFIX_REPNZ;
3340 goto next_byte;
3341 case 0xf0:
3342 prefixes |= PREFIX_LOCK;
3343 goto next_byte;
3344 case 0x2e:
3345 s->override = R_CS;
3346 goto next_byte;
3347 case 0x36:
3348 s->override = R_SS;
3349 goto next_byte;
3350 case 0x3e:
3351 s->override = R_DS;
3352 goto next_byte;
3353 case 0x26:
3354 s->override = R_ES;
3355 goto next_byte;
3356 case 0x64:
3357 s->override = R_FS;
3358 goto next_byte;
3359 case 0x65:
3360 s->override = R_GS;
3361 goto next_byte;
3362 case 0x66:
3363 prefixes |= PREFIX_DATA;
3364 goto next_byte;
3365 case 0x67:
3366 prefixes |= PREFIX_ADR;
3367 goto next_byte;
3368 case 0x40 ... 0x4f:
3369 /* REX prefix */
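/* REX is 0100WRXB: the shifts below move each of R, X and B to
   bit 3 so they can be ORed directly into the 3 bit modrm reg,
   index and base register numbers. */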
3370 rex_w = (b >> 3) & 1;
3371 rex_r = (b & 0x4) << 1;
3372 s->rex_x = (b & 0x2) << 2;
3373 REX_B(s) = (b & 0x1) << 3;
3374 x86_64_hregs = 1; /* select uniform byte register addressing */
3375 goto next_byte;
3376 }
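/* in 64 bit mode dflag/aflag use 0 = 16 bit, 1 = 32 bit, 2 = 64 bit:
   REX.W forces 64 bit operands, 0x66 toggles 16/32 bit data, and
   0x67 drops the address size to 32 bit. */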
3377 if (rex_w == 1) {
3378 /* 0x66 is ignored if rex.w is set */
3379 dflag = 2;
3380 } else {
3381 if (prefixes & PREFIX_DATA)
3382 dflag ^= 1;
3383 }
3384 if (!(prefixes & PREFIX_ADR))
3385 aflag = 2;
3386 } else
3387 #endif
3388 {
3389 switch (b) {
3390 case 0xf3:
3391 prefixes |= PREFIX_REPZ;
3392 goto next_byte;
3393 case 0xf2:
3394 prefixes |= PREFIX_REPNZ;
3395 goto next_byte;
3396 case 0xf0:
3397 prefixes |= PREFIX_LOCK;
3398 goto next_byte;
3399 case 0x2e:
3400 s->override = R_CS;
3401 goto next_byte;
3402 case 0x36:
3403 s->override = R_SS;
3404 goto next_byte;
3405 case 0x3e:
3406 s->override = R_DS;
3407 goto next_byte;
3408 case 0x26:
3409 s->override = R_ES;
3410 goto next_byte;
3411 case 0x64:
3412 s->override = R_FS;
3413 goto next_byte;
3414 case 0x65:
3415 s->override = R_GS;
3416 goto next_byte;
3417 case 0x66:
3418 prefixes |= PREFIX_DATA;
3419 goto next_byte;
3420 case 0x67:
3421 prefixes |= PREFIX_ADR;
3422 goto next_byte;
3423 }
3424 if (prefixes & PREFIX_DATA)
3425 dflag ^= 1;
3426 if (prefixes & PREFIX_ADR)
3427 aflag ^= 1;
3428 }
3429
3430 s->prefix = prefixes;
3431 s->aflag = aflag;
3432 s->dflag = dflag;
3433
3434 /* lock generation */
3435 if (prefixes & PREFIX_LOCK)
3436 gen_op_lock();
3437
3438 /* now check op code */
3439 reswitch:
3440 switch(b) {
3441 case 0x0f:
3442 /**************************/
3443 /* extended op code */
3444 b = ldub_code(s->pc++) | 0x100;
3445 goto reswitch;
3446
3447 /**************************/
3448 /* arith & logic */
3449 case 0x00 ... 0x05:
3450 case 0x08 ... 0x0d:
3451 case 0x10 ... 0x15:
3452 case 0x18 ... 0x1d:
3453 case 0x20 ... 0x25:
3454 case 0x28 ... 0x2d:
3455 case 0x30 ... 0x35:
3456 case 0x38 ... 0x3d:
3457 {
3458 int op, f, val;
3459 op = (b >> 3) & 7;
3460 f = (b >> 1) & 3;
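/* bits 5-3 of the opcode select the operation (ADD, OR, ADC, SBB,
   AND, SUB, XOR, CMP), bits 2-1 the form (0 = Ev,Gv, 1 = Gv,Ev,
   2 = A,Iv): e.g. b = 0x31 gives op = 6, f = 0, i.e. xor Ev, Gv. */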
3461
3462 if ((b & 1) == 0)
3463 ot = OT_BYTE;
3464 else
3465 ot = dflag + OT_WORD;
3466
3467 switch(f) {
3468 case 0: /* OP Ev, Gv */
3469 modrm = ldub_code(s->pc++);
3470 reg = ((modrm >> 3) & 7) | rex_r;
3471 mod = (modrm >> 6) & 3;
3472 rm = (modrm & 7) | REX_B(s);
3473 if (mod != 3) {
3474 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3475 opreg = OR_TMP0;
3476 } else if (op == OP_XORL && rm == reg) {
3477 xor_zero:
3478 /* xor reg, reg optimisation */
3479 gen_op_movl_T0_0();
3480 s->cc_op = CC_OP_LOGICB + ot;
3481 gen_op_mov_reg_T0(ot, reg);
3482 gen_op_update1_cc();
3483 break;
3484 } else {
3485 opreg = rm;
3486 }
3487 gen_op_mov_TN_reg(ot, 1, reg);
3488 gen_op(s, op, ot, opreg);
3489 break;
3490 case 1: /* OP Gv, Ev */
3491 modrm = ldub_code(s->pc++);
3492 mod = (modrm >> 6) & 3;
3493 reg = ((modrm >> 3) & 7) | rex_r;
3494 rm = (modrm & 7) | REX_B(s);
3495 if (mod != 3) {
3496 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3497 gen_op_ld_T1_A0(ot + s->mem_index);
3498 } else if (op == OP_XORL && rm == reg) {
3499 goto xor_zero;
3500 } else {
3501 gen_op_mov_TN_reg(ot, 1, rm);
3502 }
3503 gen_op(s, op, ot, reg);
3504 break;
3505 case 2: /* OP A, Iv */
3506 val = insn_get(s, ot);
3507 gen_op_movl_T1_im(val);
3508 gen_op(s, op, ot, OR_EAX);
3509 break;
3510 }
3511 }
3512 break;
3513
3514 case 0x80: /* GRP1 */
3515 case 0x81:
3516 case 0x82:
3517 case 0x83:
3518 {
3519 int val;
3520
3521 if ((b & 1) == 0)
3522 ot = OT_BYTE;
3523 else
3524 ot = dflag + OT_WORD;
3525
3526 modrm = ldub_code(s->pc++);
3527 mod = (modrm >> 6) & 3;
3528 rm = (modrm & 7) | REX_B(s);
3529 op = (modrm >> 3) & 7;
3530
3531 if (mod != 3) {
3532 if (b == 0x83)
3533 s->rip_offset = 1;
3534 else
3535 s->rip_offset = insn_const_size(ot);
3536 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3537 opreg = OR_TMP0;
3538 } else {
3539 opreg = rm;
3540 }
3541
3542 switch(b) {
3543 default:
3544 case 0x80:
3545 case 0x81:
3546 case 0x82:
3547 val = insn_get(s, ot);
3548 break;
3549 case 0x83:
3550 val = (int8_t)insn_get(s, OT_BYTE);
3551 break;
3552 }
3553 gen_op_movl_T1_im(val);
3554 gen_op(s, op, ot, opreg);
3555 }
3556 break;
3557
3558 /**************************/
3559 /* inc, dec, and other misc arith */
3560 case 0x40 ... 0x47: /* inc Gv */
3561 ot = dflag ? OT_LONG : OT_WORD;
3562 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3563 break;
3564 case 0x48 ... 0x4f: /* dec Gv */
3565 ot = dflag ? OT_LONG : OT_WORD;
3566 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3567 break;
3568 case 0xf6: /* GRP3 */
3569 case 0xf7:
3570 if ((b & 1) == 0)
3571 ot = OT_BYTE;
3572 else
3573 ot = dflag + OT_WORD;
3574
3575 modrm = ldub_code(s->pc++);
3576 mod = (modrm >> 6) & 3;
3577 rm = (modrm & 7) | REX_B(s);
3578 op = (modrm >> 3) & 7;
3579 if (mod != 3) {
3580 if (op == 0)
3581 s->rip_offset = insn_const_size(ot);
3582 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3583 gen_op_ld_T0_A0(ot + s->mem_index);
3584 } else {
3585 gen_op_mov_TN_reg(ot, 0, rm);
3586 }
3587
3588 switch(op) {
3589 case 0: /* test */
3590 val = insn_get(s, ot);
3591 gen_op_movl_T1_im(val);
3592 gen_op_testl_T0_T1_cc();
3593 s->cc_op = CC_OP_LOGICB + ot;
3594 break;
3595 case 2: /* not */
3596 gen_op_notl_T0();
3597 if (mod != 3) {
3598 gen_op_st_T0_A0(ot + s->mem_index);
3599 } else {
3600 gen_op_mov_reg_T0(ot, rm);
3601 }
3602 break;
3603 case 3: /* neg */
3604 gen_op_negl_T0();
3605 if (mod != 3) {
3606 gen_op_st_T0_A0(ot + s->mem_index);
3607 } else {
3608 gen_op_mov_reg_T0(ot, rm);
3609 }
3610 gen_op_update_neg_cc();
3611 s->cc_op = CC_OP_SUBB + ot;
3612 break;
3613 case 4: /* mul */
3614 switch(ot) {
3615 case OT_BYTE:
3616 gen_op_mulb_AL_T0();
3617 s->cc_op = CC_OP_MULB;
3618 break;
3619 case OT_WORD:
3620 gen_op_mulw_AX_T0();
3621 s->cc_op = CC_OP_MULW;
3622 break;
3623 default:
3624 case OT_LONG:
3625 gen_op_mull_EAX_T0();
3626 s->cc_op = CC_OP_MULL;
3627 break;
3628 #ifdef TARGET_X86_64
3629 case OT_QUAD:
3630 gen_op_mulq_EAX_T0();
3631 s->cc_op = CC_OP_MULQ;
3632 break;
3633 #endif
3634 }
3635 break;
3636 case 5: /* imul */
3637 switch(ot) {
3638 case OT_BYTE:
3639 gen_op_imulb_AL_T0();
3640 s->cc_op = CC_OP_MULB;
3641 break;
3642 case OT_WORD:
3643 gen_op_imulw_AX_T0();
3644 s->cc_op = CC_OP_MULW;
3645 break;
3646 default:
3647 case OT_LONG:
3648 gen_op_imull_EAX_T0();
3649 s->cc_op = CC_OP_MULL;
3650 break;
3651 #ifdef TARGET_X86_64
3652 case OT_QUAD:
3653 gen_op_imulq_EAX_T0();
3654 s->cc_op = CC_OP_MULQ;
3655 break;
3656 #endif
3657 }
3658 break;
3659 case 6: /* div */
3660 switch(ot) {
3661 case OT_BYTE:
3662 gen_jmp_im(pc_start - s->cs_base);
3663 gen_op_divb_AL_T0();
3664 break;
3665 case OT_WORD:
3666 gen_jmp_im(pc_start - s->cs_base);
3667 gen_op_divw_AX_T0();
3668 break;
3669 default:
3670 case OT_LONG:
3671 gen_jmp_im(pc_start - s->cs_base);
3672 #ifdef MACRO_TEST
3673 /* XXX: this is just a test */
3674 tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3675 #else
3676 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3677 #endif
3678 break;
3679 #ifdef TARGET_X86_64
3680 case OT_QUAD:
3681 gen_jmp_im(pc_start - s->cs_base);
3682 gen_op_divq_EAX_T0();
3683 break;
3684 #endif
3685 }
3686 break;
3687 case 7: /* idiv */
3688 switch(ot) {
3689 case OT_BYTE:
3690 gen_jmp_im(pc_start - s->cs_base);
3691 gen_op_idivb_AL_T0();
3692 break;
3693 case OT_WORD:
3694 gen_jmp_im(pc_start - s->cs_base);
3695 gen_op_idivw_AX_T0();
3696 break;
3697 default:
3698 case OT_LONG:
3699 gen_jmp_im(pc_start - s->cs_base);
3700 tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3701 break;
3702 #ifdef TARGET_X86_64
3703 case OT_QUAD:
3704 gen_jmp_im(pc_start - s->cs_base);
3705 gen_op_idivq_EAX_T0();
3706 break;
3707 #endif
3708 }
3709 break;
3710 default:
3711 goto illegal_op;
3712 }
3713 break;
3714
3715 case 0xfe: /* GRP4 */
3716 case 0xff: /* GRP5 */
3717 if ((b & 1) == 0)
3718 ot = OT_BYTE;
3719 else
3720 ot = dflag + OT_WORD;
3721
3722 modrm = ldub_code(s->pc++);
3723 mod = (modrm >> 6) & 3;
3724 rm = (modrm & 7) | REX_B(s);
3725 op = (modrm >> 3) & 7;
3726 if (op >= 2 && b == 0xfe) {
3727 goto illegal_op;
3728 }
3729 if (CODE64(s)) {
3730 if (op == 2 || op == 4) {
3731 /* operand size for near calls and jumps is 64 bit */
3732 ot = OT_QUAD;
3733 } else if (op == 3 || op == 5) {
3734 /* for far calls and far jumps, the operand is 16 or 32 bit, even
3735 in long mode */
3736 ot = dflag ? OT_LONG : OT_WORD;
3737 } else if (op == 6) {
3738 /* default push size is 64 bit */
3739 ot = dflag ? OT_QUAD : OT_WORD;
3740 }
3741 }
3742 if (mod != 3) {
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 if (op >= 2 && op != 3 && op != 5)
3745 gen_op_ld_T0_A0(ot + s->mem_index);
3746 } else {
3747 gen_op_mov_TN_reg(ot, 0, rm);
3748 }
3749
3750 switch(op) {
3751 case 0: /* inc Ev */
3752 if (mod != 3)
3753 opreg = OR_TMP0;
3754 else
3755 opreg = rm;
3756 gen_inc(s, ot, opreg, 1);
3757 break;
3758 case 1: /* dec Ev */
3759 if (mod != 3)
3760 opreg = OR_TMP0;
3761 else
3762 opreg = rm;
3763 gen_inc(s, ot, opreg, -1);
3764 break;
3765 case 2: /* call Ev */
3766 /* XXX: optimize if memory (no 'and' is necessary) */
3767 if (s->dflag == 0)
3768 gen_op_andl_T0_ffff();
3769 next_eip = s->pc - s->cs_base;
3770 gen_movtl_T1_im(next_eip);
3771 gen_push_T1(s);
3772 gen_op_jmp_T0();
3773 gen_eob(s);
3774 break;
3775 case 3: /* lcall Ev */
3776 gen_op_ld_T1_A0(ot + s->mem_index);
3777 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3778 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3779 do_lcall:
3780 if (s->pe && !s->vm86) {
3781 if (s->cc_op != CC_OP_DYNAMIC)
3782 gen_op_set_cc_op(s->cc_op);
3783 gen_jmp_im(pc_start - s->cs_base);
3784 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3785 } else {
3786 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3787 }
3788 gen_eob(s);
3789 break;
3790 case 4: /* jmp Ev */
3791 if (s->dflag == 0)
3792 gen_op_andl_T0_ffff();
3793 gen_op_jmp_T0();
3794 gen_eob(s);
3795 break;
3796 case 5: /* ljmp Ev */
3797 gen_op_ld_T1_A0(ot + s->mem_index);
3798 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3799 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3800 do_ljmp:
3801 if (s->pe && !s->vm86) {
3802 if (s->cc_op != CC_OP_DYNAMIC)
3803 gen_op_set_cc_op(s->cc_op);
3804 gen_jmp_im(pc_start - s->cs_base);
3805 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3806 } else {
3807 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3808 gen_op_movl_T0_T1();
3809 gen_op_jmp_T0();
3810 }
3811 gen_eob(s);
3812 break;
3813 case 6: /* push Ev */
3814 gen_push_T0(s);
3815 break;
3816 default:
3817 goto illegal_op;
3818 }
3819 break;
3820
3821 case 0x84: /* test Ev, Gv */
3822 case 0x85:
3823 if ((b & 1) == 0)
3824 ot = OT_BYTE;
3825 else
3826 ot = dflag + OT_WORD;
3827
3828 modrm = ldub_code(s->pc++);
3829 mod = (modrm >> 6) & 3;
3830 rm = (modrm & 7) | REX_B(s);
3831 reg = ((modrm >> 3) & 7) | rex_r;
3832
3833 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3834 gen_op_mov_TN_reg(ot, 1, reg);
3835 gen_op_testl_T0_T1_cc();
3836 s->cc_op = CC_OP_LOGICB + ot;
3837 break;
3838
3839 case 0xa8: /* test eAX, Iv */
3840 case 0xa9:
3841 if ((b & 1) == 0)
3842 ot = OT_BYTE;
3843 else
3844 ot = dflag + OT_WORD;
3845 val = insn_get(s, ot);
3846
3847 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3848 gen_op_movl_T1_im(val);
3849 gen_op_testl_T0_T1_cc();
3850 s->cc_op = CC_OP_LOGICB + ot;
3851 break;
3852
3853 case 0x98: /* CBW/CWDE/CDQE */
3854 #ifdef TARGET_X86_64
3855 if (dflag == 2) {
3856 gen_op_movslq_RAX_EAX();
3857 } else
3858 #endif
3859 if (dflag == 1)
3860 gen_op_movswl_EAX_AX();
3861 else
3862 gen_op_movsbw_AX_AL();
3863 break;
3864 case 0x99: /* CWD/CDQ/CQO */
3865 #ifdef TARGET_X86_64
3866 if (dflag == 2) {
3867 gen_op_movsqo_RDX_RAX();
3868 } else
3869 #endif
3870 if (dflag == 1)
3871 gen_op_movslq_EDX_EAX();
3872 else
3873 gen_op_movswl_DX_AX();
3874 break;
3875 case 0x1af: /* imul Gv, Ev */
3876 case 0x69: /* imul Gv, Ev, I */
3877 case 0x6b:
3878 ot = dflag + OT_WORD;
3879 modrm = ldub_code(s->pc++);
3880 reg = ((modrm >> 3) & 7) | rex_r;
3881 if (b == 0x69)
3882 s->rip_offset = insn_const_size(ot);
3883 else if (b == 0x6b)
3884 s->rip_offset = 1;
3885 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3886 if (b == 0x69) {
3887 val = insn_get(s, ot);
3888 gen_op_movl_T1_im(val);
3889 } else if (b == 0x6b) {
3890 val = (int8_t)insn_get(s, OT_BYTE);
3891 gen_op_movl_T1_im(val);
3892 } else {
3893 gen_op_mov_TN_reg(ot, 1, reg);
3894 }
3895
3896 #ifdef TARGET_X86_64
3897 if (ot == OT_QUAD) {
3898 gen_op_imulq_T0_T1();
3899 } else
3900 #endif
3901 if (ot == OT_LONG) {
3902 gen_op_imull_T0_T1();
3903 } else {
3904 gen_op_imulw_T0_T1();
3905 }
3906 gen_op_mov_reg_T0(ot, reg);
3907 s->cc_op = CC_OP_MULB + ot;
3908 break;
3909 case 0x1c0:
3910 case 0x1c1: /* xadd Ev, Gv */
3911 if ((b & 1) == 0)
3912 ot = OT_BYTE;
3913 else
3914 ot = dflag + OT_WORD;
3915 modrm = ldub_code(s->pc++);
3916 reg = ((modrm >> 3) & 7) | rex_r;
3917 mod = (modrm >> 6) & 3;
3918 if (mod == 3) {
3919 rm = (modrm & 7) | REX_B(s);
3920 gen_op_mov_TN_reg(ot, 0, reg);
3921 gen_op_mov_TN_reg(ot, 1, rm);
3922 gen_op_addl_T0_T1();
3923 gen_op_mov_reg_T1(ot, reg);
3924 gen_op_mov_reg_T0(ot, rm);
3925 } else {
3926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3927 gen_op_mov_TN_reg(ot, 0, reg);
3928 gen_op_ld_T1_A0(ot + s->mem_index);
3929 gen_op_addl_T0_T1();
3930 gen_op_st_T0_A0(ot + s->mem_index);
3931 gen_op_mov_reg_T1(ot, reg);
3932 }
3933 gen_op_update2_cc();
3934 s->cc_op = CC_OP_ADDB + ot;
3935 break;
3936 case 0x1b0:
3937 case 0x1b1: /* cmpxchg Ev, Gv */
3938 if ((b & 1) == 0)
3939 ot = OT_BYTE;
3940 else
3941 ot = dflag + OT_WORD;
3942 modrm = ldub_code(s->pc++);
3943 reg = ((modrm >> 3) & 7) | rex_r;
3944 mod = (modrm >> 6) & 3;
3945 gen_op_mov_TN_reg(ot, 1, reg);
3946 if (mod == 3) {
3947 rm = (modrm & 7) | REX_B(s);
3948 gen_op_mov_TN_reg(ot, 0, rm);
3949 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3950 gen_op_mov_reg_T0(ot, rm);
3951 } else {
3952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3953 gen_op_ld_T0_A0(ot + s->mem_index);
3954 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3955 }
3956 s->cc_op = CC_OP_SUBB + ot;
3957 break;
3958 case 0x1c7: /* cmpxchg8b */
3959 modrm = ldub_code(s->pc++);
3960 mod = (modrm >> 6) & 3;
3961 if ((mod == 3) || ((modrm & 0x38) != 0x8)) /* mem operand, /1 only */
3962 goto illegal_op;
3963 gen_jmp_im(pc_start - s->cs_base);
3964 if (s->cc_op != CC_OP_DYNAMIC)
3965 gen_op_set_cc_op(s->cc_op);
3966 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3967 gen_op_cmpxchg8b();
3968 s->cc_op = CC_OP_EFLAGS;
3969 break;
3970
3971 /**************************/
3972 /* push/pop */
3973 case 0x50 ... 0x57: /* push */
3974 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3975 gen_push_T0(s);
3976 break;
3977 case 0x58 ... 0x5f: /* pop */
3978 if (CODE64(s)) {
3979 ot = dflag ? OT_QUAD : OT_WORD;
3980 } else {
3981 ot = dflag + OT_WORD;
3982 }
3983 gen_pop_T0(s);
3984 /* NOTE: order is important for pop %sp */
3985 gen_pop_update(s);
3986 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3987 break;
3988 case 0x60: /* pusha */
3989 if (CODE64(s))
3990 goto illegal_op;
3991 gen_pusha(s);
3992 break;
3993 case 0x61: /* popa */
3994 if (CODE64(s))
3995 goto illegal_op;
3996 gen_popa(s);
3997 break;
3998 case 0x68: /* push Iv */
3999 case 0x6a:
4000 if (CODE64(s)) {
4001 ot = dflag ? OT_QUAD : OT_WORD;
4002 } else {
4003 ot = dflag + OT_WORD;
4004 }
4005 if (b == 0x68)
4006 val = insn_get(s, ot);
4007 else
4008 val = (int8_t)insn_get(s, OT_BYTE);
4009 gen_op_movl_T0_im(val);
4010 gen_push_T0(s);
4011 break;
4012 case 0x8f: /* pop Ev */
4013 if (CODE64(s)) {
4014 ot = dflag ? OT_QUAD : OT_WORD;
4015 } else {
4016 ot = dflag + OT_WORD;
4017 }
4018 modrm = ldub_code(s->pc++);
4019 mod = (modrm >> 6) & 3;
4020 gen_pop_T0(s);
4021 if (mod == 3) {
4022 /* NOTE: order is important for pop %sp */
4023 gen_pop_update(s);
4024 rm = (modrm & 7) | REX_B(s);
4025 gen_op_mov_reg_T0(ot, rm);
4026 } else {
4027 /* NOTE: order is important too for MMU exceptions */
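/* popl_esp_hack makes ESP based effective addresses see the value
   ESP has after the pop, as the architecture requires for e.g.
   "pop (%esp)" (see gen_lea_modrm). */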
4028 s->popl_esp_hack = 1 << ot;
4029 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4030 s->popl_esp_hack = 0;
4031 gen_pop_update(s);
4032 }
4033 break;
4034 case 0xc8: /* enter */
4035 {
4036 int level;
4037 val = lduw_code(s->pc);
4038 s->pc += 2;
4039 level = ldub_code(s->pc++);
4040 gen_enter(s, val, level);
4041 }
4042 break;
4043 case 0xc9: /* leave */
4044 /* XXX: exception not precise (ESP is updated before potential exception) */
4045 if (CODE64(s)) {
4046 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4047 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4048 } else if (s->ss32) {
4049 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4050 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4051 } else {
4052 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4053 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4054 }
4055 gen_pop_T0(s);
4056 if (CODE64(s)) {
4057 ot = dflag ? OT_QUAD : OT_WORD;
4058 } else {
4059 ot = dflag + OT_WORD;
4060 }
4061 gen_op_mov_reg_T0(ot, R_EBP);
4062 gen_pop_update(s);
4063 break;
4064 case 0x06: /* push es */
4065 case 0x0e: /* push cs */
4066 case 0x16: /* push ss */
4067 case 0x1e: /* push ds */
4068 if (CODE64(s))
4069 goto illegal_op;
4070 gen_op_movl_T0_seg(b >> 3);
4071 gen_push_T0(s);
4072 break;
4073 case 0x1a0: /* push fs */
4074 case 0x1a8: /* push gs */
4075 gen_op_movl_T0_seg((b >> 3) & 7);
4076 gen_push_T0(s);
4077 break;
4078 case 0x07: /* pop es */
4079 case 0x17: /* pop ss */
4080 case 0x1f: /* pop ds */
4081 if (CODE64(s))
4082 goto illegal_op;
4083 reg = b >> 3;
4084 gen_pop_T0(s);
4085 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4086 gen_pop_update(s);
4087 if (reg == R_SS) {
4088 /* if reg == SS, inhibit interrupts/trace. */
4089 /* If several instructions disable interrupts, only the
4090 _first_ does it */
4091 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4092 gen_op_set_inhibit_irq();
4093 s->tf = 0;
4094 }
4095 if (s->is_jmp) {
4096 gen_jmp_im(s->pc - s->cs_base);
4097 gen_eob(s);
4098 }
4099 break;
4100 case 0x1a1: /* pop fs */
4101 case 0x1a9: /* pop gs */
4102 gen_pop_T0(s);
4103 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4104 gen_pop_update(s);
4105 if (s->is_jmp) {
4106 gen_jmp_im(s->pc - s->cs_base);
4107 gen_eob(s);
4108 }
4109 break;
4110
4111 /**************************/
4112 /* mov */
4113 case 0x88:
4114 case 0x89: /* mov Gv, Ev */
4115 if ((b & 1) == 0)
4116 ot = OT_BYTE;
4117 else
4118 ot = dflag + OT_WORD;
4119 modrm = ldub_code(s->pc++);
4120 reg = ((modrm >> 3) & 7) | rex_r;
4121
4122 /* generate a generic store */
4123 gen_ldst_modrm(s, modrm, ot, reg, 1);
4124 break;
4125 case 0xc6:
4126 case 0xc7: /* mov Ev, Iv */
4127 if ((b & 1) == 0)
4128 ot = OT_BYTE;
4129 else
4130 ot = dflag + OT_WORD;
4131 modrm = ldub_code(s->pc++);
4132 mod = (modrm >> 6) & 3;
4133 if (mod != 3) {
4134 s->rip_offset = insn_const_size(ot);
4135 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4136 }
4137 val = insn_get(s, ot);
4138 gen_op_movl_T0_im(val);
4139 if (mod != 3)
4140 gen_op_st_T0_A0(ot + s->mem_index);
4141 else
4142 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4143 break;
4144 case 0x8a:
4145 case 0x8b: /* mov Ev, Gv */
4146 if ((b & 1) == 0)
4147 ot = OT_BYTE;
4148 else
4149 ot = OT_WORD + dflag;
4150 modrm = ldub_code(s->pc++);
4151 reg = ((modrm >> 3) & 7) | rex_r;
4152
4153 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4154 gen_op_mov_reg_T0(ot, reg);
4155 break;
4156 case 0x8e: /* mov seg, Gv */
4157 modrm = ldub_code(s->pc++);
4158 reg = (modrm >> 3) & 7;
4159 if (reg >= 6 || reg == R_CS)
4160 goto illegal_op;
4161 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4162 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4163 if (reg == R_SS) {
4164 /* if reg == SS, inhibit interrupts/trace */
4165 /* If several instructions disable interrupts, only the
4166 _first_ does it */
4167 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4168 gen_op_set_inhibit_irq();
4169 s->tf = 0;
4170 }
4171 if (s->is_jmp) {
4172 gen_jmp_im(s->pc - s->cs_base);
4173 gen_eob(s);
4174 }
4175 break;
4176 case 0x8c: /* mov Gv, seg */
4177 modrm = ldub_code(s->pc++);
4178 reg = (modrm >> 3) & 7;
4179 mod = (modrm >> 6) & 3;
4180 if (reg >= 6)
4181 goto illegal_op;
4182 gen_op_movl_T0_seg(reg);
4183 if (mod == 3)
4184 ot = OT_WORD + dflag;
4185 else
4186 ot = OT_WORD;
4187 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4188 break;
4189
4190 case 0x1b6: /* movzbS Gv, Eb */
4191 case 0x1b7: /* movzwS Gv, Ew */
4192 case 0x1be: /* movsbS Gv, Eb */
4193 case 0x1bf: /* movswS Gv, Ew */
4194 {
4195 int d_ot;
4196 /* d_ot is the size of the destination */
4197 d_ot = dflag + OT_WORD;
4198 /* ot is the size of the source */
4199 ot = (b & 1) + OT_BYTE;
4200 modrm = ldub_code(s->pc++);
4201 reg = ((modrm >> 3) & 7) | rex_r;
4202 mod = (modrm >> 6) & 3;
4203 rm = (modrm & 7) | REX_B(s);
4204
4205 if (mod == 3) {
4206 gen_op_mov_TN_reg(ot, 0, rm);
4207 switch(ot | (b & 8)) {
4208 case OT_BYTE:
4209 gen_op_movzbl_T0_T0();
4210 break;
4211 case OT_BYTE | 8:
4212 gen_op_movsbl_T0_T0();
4213 break;
4214 case OT_WORD:
4215 gen_op_movzwl_T0_T0();
4216 break;
4217 default:
4218 case OT_WORD | 8:
4219 gen_op_movswl_T0_T0();
4220 break;
4221 }
4222 gen_op_mov_reg_T0(d_ot, reg);
4223 } else {
4224 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4225 if (b & 8) {
4226 gen_op_lds_T0_A0(ot + s->mem_index);
4227 } else {
4228 gen_op_ldu_T0_A0(ot + s->mem_index);
4229 }
4230 gen_op_mov_reg_T0(d_ot, reg);
4231 }
4232 }
4233 break;
4234
4235 case 0x8d: /* lea */
4236 ot = dflag + OT_WORD;
4237 modrm = ldub_code(s->pc++);
4238 mod = (modrm >> 6) & 3;
4239 if (mod == 3)
4240 goto illegal_op;
4241 reg = ((modrm >> 3) & 7) | rex_r;
4242 /* we must ensure that no segment is added */
4243 s->override = -1;
4244 val = s->addseg;
4245 s->addseg = 0;
4246 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4247 s->addseg = val;
4248 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4249 break;
4250
4251 case 0xa0: /* mov EAX, Ov */
4252 case 0xa1:
4253 case 0xa2: /* mov Ov, EAX */
4254 case 0xa3:
4255 {
4256 target_ulong offset_addr;
4257
4258 if ((b & 1) == 0)
4259 ot = OT_BYTE;
4260 else
4261 ot = dflag + OT_WORD;
4262 #ifdef TARGET_X86_64
4263 if (s->aflag == 2) {
4264 offset_addr = ldq_code(s->pc);
4265 s->pc += 8;
4266 gen_op_movq_A0_im(offset_addr);
4267 } else
4268 #endif
4269 {
4270 if (s->aflag) {
4271 offset_addr = insn_get(s, OT_LONG);
4272 } else {
4273 offset_addr = insn_get(s, OT_WORD);
4274 }
4275 gen_op_movl_A0_im(offset_addr);
4276 }
4277 gen_add_A0_ds_seg(s);
4278 if ((b & 2) == 0) {
4279 gen_op_ld_T0_A0(ot + s->mem_index);
4280 gen_op_mov_reg_T0(ot, R_EAX);
4281 } else {
4282 gen_op_mov_TN_reg(ot, 0, R_EAX);
4283 gen_op_st_T0_A0(ot + s->mem_index);
4284 }
4285 }
4286 break;
4287 case 0xd7: /* xlat */
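/* xlat: AL = DS:[rBX + unsigned AL], a one byte table lookup */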
4288 #ifdef TARGET_X86_64
4289 if (s->aflag == 2) {
4290 gen_op_movq_A0_reg(R_EBX);
4291 gen_op_addq_A0_AL();
4292 } else
4293 #endif
4294 {
4295 gen_op_movl_A0_reg(R_EBX);
4296 gen_op_addl_A0_AL();
4297 if (s->aflag == 0)
4298 gen_op_andl_A0_ffff();
4299 }
4300 gen_add_A0_ds_seg(s);
4301 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4302 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4303 break;
4304 case 0xb0 ... 0xb7: /* mov R, Ib */
4305 val = insn_get(s, OT_BYTE);
4306 gen_op_movl_T0_im(val);
4307 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4308 break;
4309 case 0xb8 ... 0xbf: /* mov R, Iv */
4310 #ifdef TARGET_X86_64
4311 if (dflag == 2) {
4312 uint64_t tmp;
4313 /* 64 bit case */
4314 tmp = ldq_code(s->pc);
4315 s->pc += 8;
4316 reg = (b & 7) | REX_B(s);
4317 gen_movtl_T0_im(tmp);
4318 gen_op_mov_reg_T0(OT_QUAD, reg);
4319 } else
4320 #endif
4321 {
4322 ot = dflag ? OT_LONG : OT_WORD;
4323 val = insn_get(s, ot);
4324 reg = (b & 7) | REX_B(s);
4325 gen_op_movl_T0_im(val);
4326 gen_op_mov_reg_T0(ot, reg);
4327 }
4328 break;
4329
4330 case 0x91 ... 0x97: /* xchg R, EAX */
4331 ot = dflag + OT_WORD;
4332 reg = (b & 7) | REX_B(s);
4333 rm = R_EAX;
4334 goto do_xchg_reg;
4335 case 0x86:
4336 case 0x87: /* xchg Ev, Gv */
4337 if ((b & 1) == 0)
4338 ot = OT_BYTE;
4339 else
4340 ot = dflag + OT_WORD;
4341 modrm = ldub_code(s->pc++);
4342 reg = ((modrm >> 3) & 7) | rex_r;
4343 mod = (modrm >> 6) & 3;
4344 if (mod == 3) {
4345 rm = (modrm & 7) | REX_B(s);
4346 do_xchg_reg:
4347 gen_op_mov_TN_reg(ot, 0, reg);
4348 gen_op_mov_TN_reg(ot, 1, rm);
4349 gen_op_mov_reg_T0(ot, rm);
4350 gen_op_mov_reg_T1(ot, reg);
4351 } else {
4352 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4353 gen_op_mov_TN_reg(ot, 0, reg);
4354 /* for xchg, lock is implicit */
4355 if (!(prefixes & PREFIX_LOCK))
4356 gen_op_lock();
4357 gen_op_ld_T1_A0(ot + s->mem_index);
4358 gen_op_st_T0_A0(ot + s->mem_index);
4359 if (!(prefixes & PREFIX_LOCK))
4360 gen_op_unlock();
4361 gen_op_mov_reg_T1(ot, reg);
4362 }
4363 break;
4364 case 0xc4: /* les Gv */
4365 if (CODE64(s))
4366 goto illegal_op;
4367 op = R_ES;
4368 goto do_lxx;
4369 case 0xc5: /* lds Gv */
4370 if (CODE64(s))
4371 goto illegal_op;
4372 op = R_DS;
4373 goto do_lxx;
4374 case 0x1b2: /* lss Gv */
4375 op = R_SS;
4376 goto do_lxx;
4377 case 0x1b4: /* lfs Gv */
4378 op = R_FS;
4379 goto do_lxx;
4380 case 0x1b5: /* lgs Gv */
4381 op = R_GS;
4382 do_lxx:
4383 ot = dflag ? OT_LONG : OT_WORD;
4384 modrm = ldub_code(s->pc++);
4385 reg = ((modrm >> 3) & 7) | rex_r;
4386 mod = (modrm >> 6) & 3;
4387 if (mod == 3)
4388 goto illegal_op;
4389 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4390 gen_op_ld_T1_A0(ot + s->mem_index);
4391 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4392 /* load the segment first to handle exceptions properly */
4393 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4394 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4395 /* then put the data */
4396 gen_op_mov_reg_T1(ot, reg);
4397 if (s->is_jmp) {
4398 gen_jmp_im(s->pc - s->cs_base);
4399 gen_eob(s);
4400 }
4401 break;
4402
4403 /************************/
4404 /* shifts */
4405 case 0xc0:
4406 case 0xc1:
4407 /* shift Ev,Ib */
4408 shift = 2;
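/* 'shift' encodes where the count comes from: 0 = CL,
   1 = the constant 1, 2 = an imm8 fetched below */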
4409 grp2:
4410 {
4411 if ((b & 1) == 0)
4412 ot = OT_BYTE;
4413 else
4414 ot = dflag + OT_WORD;
4415
4416 modrm = ldub_code(s->pc++);
4417 mod = (modrm >> 6) & 3;
4418 op = (modrm >> 3) & 7;
4419
4420 if (mod != 3) {
4421 if (shift == 2) {
4422 s->rip_offset = 1;
4423 }
4424 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4425 opreg = OR_TMP0;
4426 } else {
4427 opreg = (modrm & 7) | REX_B(s);
4428 }
4429
4430 /* simpler op */
4431 if (shift == 0) {
4432 gen_shift(s, op, ot, opreg, OR_ECX);
4433 } else {
4434 if (shift == 2) {
4435 shift = ldub_code(s->pc++);
4436 }
4437 gen_shifti(s, op, ot, opreg, shift);
4438 }
4439 }
4440 break;
4441 case 0xd0:
4442 case 0xd1:
4443 /* shift Ev,1 */
4444 shift = 1;
4445 goto grp2;
4446 case 0xd2:
4447 case 0xd3:
4448 /* shift Ev,cl */
4449 shift = 0;
4450 goto grp2;
4451
4452 case 0x1a4: /* shld imm */
4453 op = 0;
4454 shift = 1;
4455 goto do_shiftd;
4456 case 0x1a5: /* shld cl */
4457 op = 0;
4458 shift = 0;
4459 goto do_shiftd;
4460 case 0x1ac: /* shrd imm */
4461 op = 1;
4462 shift = 1;
4463 goto do_shiftd;
4464 case 0x1ad: /* shrd cl */
4465 op = 1;
4466 shift = 0;
4467 do_shiftd:
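/* op selects the direction (0 = shld, 1 = shrd); a non zero
   'shift' means an imm8 count, otherwise the count is in CL.
   Counts are masked mod 32, or mod 64 for 64 bit operands. */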
4468 ot = dflag + OT_WORD;
4469 modrm = ldub_code(s->pc++);
4470 mod = (modrm >> 6) & 3;
4471 rm = (modrm & 7) | REX_B(s);
4472 reg = ((modrm >> 3) & 7) | rex_r;
4473
4474 if (mod != 3) {
4475 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4476 gen_op_ld_T0_A0(ot + s->mem_index);
4477 } else {
4478 gen_op_mov_TN_reg(ot, 0, rm);
4479 }
4480 gen_op_mov_TN_reg(ot, 1, reg);
4481
4482 if (shift) {
4483 val = ldub_code(s->pc++);
4484 if (ot == OT_QUAD)
4485 val &= 0x3f;
4486 else
4487 val &= 0x1f;
4488 if (val) {
4489 if (mod == 3)
4490 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4491 else
4492 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4493 if (op == 0 && ot != OT_WORD)
4494 s->cc_op = CC_OP_SHLB + ot;
4495 else
4496 s->cc_op = CC_OP_SARB + ot;
4497 }
4498 } else {
4499 if (s->cc_op != CC_OP_DYNAMIC)
4500 gen_op_set_cc_op(s->cc_op);
4501 if (mod == 3)
4502 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4503 else
4504 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4505 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4506 }
4507 if (mod == 3) {
4508 gen_op_mov_reg_T0(ot, rm);
4509 }
4510 break;
4511
4512 /************************/
4513 /* floats */
4514 case 0xd8 ... 0xdf:
4515 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4516 /* if CR0.EM or CR0.TS is set, generate an FPU exception */
4517 /* XXX: what to do if illegal op? */
4518 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4519 break;
4520 }
4521 modrm = ldub_code(s->pc++);
4522 mod = (modrm >> 6) & 3;
4523 rm = modrm & 7;
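/* pack the low 3 opcode bits and the reg field into a 6 bit
   index, e.g. b = 0xd9 with /5 gives op = 0x0d (grp d9/5) */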
4524 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4525 if (mod != 3) {
4526 /* memory op */
4527 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4528 switch(op) {
4529 case 0x00 ... 0x07: /* fxxxs */
4530 case 0x10 ... 0x17: /* fixxxl */
4531 case 0x20 ... 0x27: /* fxxxl */
4532 case 0x30 ... 0x37: /* fixxx */
4533 {
4534 int op1;
4535 op1 = op & 7;
4536
4537 switch(op >> 4) {
4538 case 0:
4539 gen_op_flds_FT0_A0();
4540 break;
4541 case 1:
4542 gen_op_fildl_FT0_A0();
4543 break;
4544 case 2:
4545 gen_op_fldl_FT0_A0();
4546 break;
4547 case 3:
4548 default:
4549 gen_op_fild_FT0_A0();
4550 break;
4551 }
4552
4553 gen_op_fp_arith_ST0_FT0[op1]();
4554 if (op1 == 3) {
4555 /* fcomp needs pop */
4556 gen_op_fpop();
4557 }
4558 }
4559 break;
4560 case 0x08: /* flds */
4561 case 0x0a: /* fsts */
4562 case 0x0b: /* fstps */
4563 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4564 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4565 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4566 switch(op & 7) {
4567 case 0:
4568 switch(op >> 4) {
4569 case 0:
4570 gen_op_flds_ST0_A0();
4571 break;
4572 case 1:
4573 gen_op_fildl_ST0_A0();
4574 break;
4575 case 2:
4576 gen_op_fldl_ST0_A0();
4577 break;
4578 case 3:
4579 default:
4580 gen_op_fild_ST0_A0();
4581 break;
4582 }
4583 break;
4584 case 1:
4585 switch(op >> 4) {
4586 case 1:
4587 gen_op_fisttl_ST0_A0();
4588 break;
4589 case 2:
4590 gen_op_fisttll_ST0_A0();
4591 break;
4592 case 3:
4593 default:
4594 gen_op_fistt_ST0_A0();
4595 }
4596 gen_op_fpop();
4597 break;
4598 default:
4599 switch(op >> 4) {
4600 case 0:
4601 gen_op_fsts_ST0_A0();
4602 break;
4603 case 1:
4604 gen_op_fistl_ST0_A0();
4605 break;
4606 case 2:
4607 gen_op_fstl_ST0_A0();
4608 break;
4609 case 3:
4610 default:
4611 gen_op_fist_ST0_A0();
4612 break;
4613 }
4614 if ((op & 7) == 3)
4615 gen_op_fpop();
4616 break;
4617 }
4618 break;
4619 case 0x0c: /* fldenv mem */
4620 gen_op_fldenv_A0(s->dflag);
4621 break;
4622 case 0x0d: /* fldcw mem */
4623 gen_op_fldcw_A0();
4624 break;
4625 case 0x0e: /* fnstenv mem */
4626 gen_op_fnstenv_A0(s->dflag);
4627 break;
4628 case 0x0f: /* fnstcw mem */
4629 gen_op_fnstcw_A0();
4630 break;
4631 case 0x1d: /* fldt mem */
4632 gen_op_fldt_ST0_A0();
4633 break;
4634 case 0x1f: /* fstpt mem */
4635 gen_op_fstt_ST0_A0();
4636 gen_op_fpop();
4637 break;
4638 case 0x2c: /* frstor mem */
4639 gen_op_frstor_A0(s->dflag);
4640 break;
4641 case 0x2e: /* fnsave mem */
4642 gen_op_fnsave_A0(s->dflag);
4643 break;
4644 case 0x2f: /* fnstsw mem */
4645 gen_op_fnstsw_A0();
4646 break;
4647 case 0x3c: /* fbld */
4648 gen_op_fbld_ST0_A0();
4649 break;
4650 case 0x3e: /* fbstp */
4651 gen_op_fbst_ST0_A0();
4652 gen_op_fpop();
4653 break;
4654 case 0x3d: /* fildll */
4655 gen_op_fildll_ST0_A0();
4656 break;
4657 case 0x3f: /* fistpll */
4658 gen_op_fistll_ST0_A0();
4659 gen_op_fpop();
4660 break;
4661 default:
4662 goto illegal_op;
4663 }
4664 } else {
4665 /* register float ops */
4666 opreg = rm;
4667
4668 switch(op) {
4669 case 0x08: /* fld sti */
4670 gen_op_fpush();
4671 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4672 break;
4673 case 0x09: /* fxchg sti */
4674 case 0x29: /* fxchg4 sti, undocumented op */
4675 case 0x39: /* fxchg7 sti, undocumented op */
4676 gen_op_fxchg_ST0_STN(opreg);
4677 break;
4678 case 0x0a: /* grp d9/2 */
4679 switch(rm) {
4680 case 0: /* fnop */
4681 /* check exceptions (FreeBSD FPU probe) */
4682 if (s->cc_op != CC_OP_DYNAMIC)
4683 gen_op_set_cc_op(s->cc_op);
4684 gen_jmp_im(pc_start - s->cs_base);
4685 gen_op_fwait();
4686 break;
4687 default:
4688 goto illegal_op;
4689 }
4690 break;
4691 case 0x0c: /* grp d9/4 */
4692 switch(rm) {
4693 case 0: /* fchs */
4694 gen_op_fchs_ST0();
4695 break;
4696 case 1: /* fabs */
4697 gen_op_fabs_ST0();
4698 break;
4699 case 4: /* ftst */
4700 gen_op_fldz_FT0();
4701 gen_op_fcom_ST0_FT0();
4702 break;
4703 case 5: /* fxam */
4704 gen_op_fxam_ST0();
4705 break;
4706 default:
4707 goto illegal_op;
4708 }
4709 break;
4710 case 0x0d: /* grp d9/5 */
4711 {
4712 switch(rm) {
4713 case 0:
4714 gen_op_fpush();
4715 gen_op_fld1_ST0();
4716 break;
4717 case 1:
4718 gen_op_fpush();
4719 gen_op_fldl2t_ST0();
4720 break;
4721 case 2:
4722 gen_op_fpush();
4723 gen_op_fldl2e_ST0();
4724 break;
4725 case 3:
4726 gen_op_fpush();
4727 gen_op_fldpi_ST0();
4728 break;
4729 case 4:
4730 gen_op_fpush();
4731 gen_op_fldlg2_ST0();
4732 break;
4733 case 5:
4734 gen_op_fpush();
4735 gen_op_fldln2_ST0();
4736 break;
4737 case 6:
4738 gen_op_fpush();
4739 gen_op_fldz_ST0();
4740 break;
4741 default:
4742 goto illegal_op;
4743 }
4744 }
4745 break;
4746 case 0x0e: /* grp d9/6 */
4747 switch(rm) {
4748 case 0: /* f2xm1 */
4749 gen_op_f2xm1();
4750 break;
4751 case 1: /* fyl2x */
4752 gen_op_fyl2x();
4753 break;
4754 case 2: /* fptan */
4755 gen_op_fptan();
4756 break;
4757 case 3: /* fpatan */
4758 gen_op_fpatan();
4759 break;
4760 case 4: /* fxtract */
4761 gen_op_fxtract();
4762 break;
4763 case 5: /* fprem1 */
4764 gen_op_fprem1();
4765 break;
4766 case 6: /* fdecstp */
4767 gen_op_fdecstp();
4768 break;
4769 default:
4770 case 7: /* fincstp */
4771 gen_op_fincstp();
4772 break;
4773 }
4774 break;
4775 case 0x0f: /* grp d9/7 */
4776 switch(rm) {
4777 case 0: /* fprem */
4778 gen_op_fprem();
4779 break;
4780 case 1: /* fyl2xp1 */
4781 gen_op_fyl2xp1();
4782 break;
4783 case 2: /* fsqrt */
4784 gen_op_fsqrt();
4785 break;
4786 case 3: /* fsincos */
4787 gen_op_fsincos();
4788 break;
4789 case 5: /* fscale */
4790 gen_op_fscale();
4791 break;
4792 case 4: /* frndint */
4793 gen_op_frndint();
4794 break;
4795 case 6: /* fsin */
4796 gen_op_fsin();
4797 break;
4798 default:
4799 case 7: /* fcos */
4800 gen_op_fcos();
4801 break;
4802 }
4803 break;
4804 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4805 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4806 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4807 {
4808 int op1;
4809
4810 op1 = op & 7;
4811 if (op >= 0x20) {
4812 gen_op_fp_arith_STN_ST0[op1](opreg);
4813 if (op >= 0x30)
4814 gen_op_fpop();
4815 } else {
4816 gen_op_fmov_FT0_STN(opreg);
4817 gen_op_fp_arith_ST0_FT0[op1]();
4818 }
4819 }
4820 break;
4821 case 0x02: /* fcom */
4822 case 0x22: /* fcom2, undocumented op */
4823 gen_op_fmov_FT0_STN(opreg);
4824 gen_op_fcom_ST0_FT0();
4825 break;
4826 case 0x03: /* fcomp */
4827 case 0x23: /* fcomp3, undocumented op */
4828 case 0x32: /* fcomp5, undocumented op */
4829 gen_op_fmov_FT0_STN(opreg);
4830 gen_op_fcom_ST0_FT0();
4831 gen_op_fpop();
4832 break;
4833 case 0x15: /* da/5 */
4834 switch(rm) {
4835 case 1: /* fucompp */
4836 gen_op_fmov_FT0_STN(1);
4837 gen_op_fucom_ST0_FT0();
4838 gen_op_fpop();
4839 gen_op_fpop();
4840 break;
4841 default:
4842 goto illegal_op;
4843 }
4844 break;
4845 case 0x1c:
4846 switch(rm) {
4847 case 0: /* feni (287 only, just do nop here) */
4848 break;
4849 case 1: /* fdisi (287 only, just do nop here) */
4850 break;
4851 case 2: /* fclex */
4852 gen_op_fclex();
4853 break;
4854 case 3: /* fninit */
4855 gen_op_fninit();
4856 break;
4857 case 4: /* fsetpm (287 only, just do nop here) */
4858 break;
4859 default:
4860 goto illegal_op;
4861 }
4862 break;
4863 case 0x1d: /* fucomi */
4864 if (s->cc_op != CC_OP_DYNAMIC)
4865 gen_op_set_cc_op(s->cc_op);
4866 gen_op_fmov_FT0_STN(opreg);
4867 gen_op_fucomi_ST0_FT0();
4868 s->cc_op = CC_OP_EFLAGS;
4869 break;
4870 case 0x1e: /* fcomi */
4871 if (s->cc_op != CC_OP_DYNAMIC)
4872 gen_op_set_cc_op(s->cc_op);
4873 gen_op_fmov_FT0_STN(opreg);
4874 gen_op_fcomi_ST0_FT0();
4875 s->cc_op = CC_OP_EFLAGS;
4876 break;
4877 case 0x28: /* ffree sti */
4878 gen_op_ffree_STN(opreg);
4879 break;
4880 case 0x2a: /* fst sti */
4881 gen_op_fmov_STN_ST0(opreg);
4882 break;
4883 case 0x2b: /* fstp sti */
4884 case 0x0b: /* fstp1 sti, undocumented op */
4885 case 0x3a: /* fstp8 sti, undocumented op */
4886 case 0x3b: /* fstp9 sti, undocumented op */
4887 gen_op_fmov_STN_ST0(opreg);
4888 gen_op_fpop();
4889 break;
4890 case 0x2c: /* fucom st(i) */
4891 gen_op_fmov_FT0_STN(opreg);
4892 gen_op_fucom_ST0_FT0();
4893 break;
4894 case 0x2d: /* fucomp st(i) */
4895 gen_op_fmov_FT0_STN(opreg);
4896 gen_op_fucom_ST0_FT0();
4897 gen_op_fpop();
4898 break;
4899 case 0x33: /* de/3 */
4900 switch(rm) {
4901 case 1: /* fcompp */
4902 gen_op_fmov_FT0_STN(1);
4903 gen_op_fcom_ST0_FT0();
4904 gen_op_fpop();
4905 gen_op_fpop();
4906 break;
4907 default:
4908 goto illegal_op;
4909 }
4910 break;
4911 case 0x38: /* ffreep sti, undocumented op */
4912 gen_op_ffree_STN(opreg);
4913 gen_op_fpop();
4914 break;
4915 case 0x3c: /* df/4 */
4916 switch(rm) {
4917 case 0:
4918 gen_op_fnstsw_EAX();
4919 break;
4920 default:
4921 goto illegal_op;
4922 }
4923 break;
4924 case 0x3d: /* fucomip */
4925 if (s->cc_op != CC_OP_DYNAMIC)
4926 gen_op_set_cc_op(s->cc_op);
4927 gen_op_fmov_FT0_STN(opreg);
4928 gen_op_fucomi_ST0_FT0();
4929 gen_op_fpop();
4930 s->cc_op = CC_OP_EFLAGS;
4931 break;
4932 case 0x3e: /* fcomip */
4933 if (s->cc_op != CC_OP_DYNAMIC)
4934 gen_op_set_cc_op(s->cc_op);
4935 gen_op_fmov_FT0_STN(opreg);
4936 gen_op_fcomi_ST0_FT0();
4937 gen_op_fpop();
4938 s->cc_op = CC_OP_EFLAGS;
4939 break;
4940 case 0x10 ... 0x13: /* fcmovxx */
4941 case 0x18 ... 0x1b:
4942 {
4943 int op1;
4944 static const uint8_t fcmov_cc[8] = {
4945 (JCC_B << 1),
4946 (JCC_Z << 1),
4947 (JCC_BE << 1),
4948 (JCC_P << 1),
4949 };
4950 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4951 gen_setcc(s, op1);
4952 gen_op_fcmov_ST0_STN_T0(opreg);
4953 }
4954 break;
4955 default:
4956 goto illegal_op;
4957 }
4958 }
4959 break;
4960 /************************/
4961 /* string ops */
4962
4963 case 0xa4: /* movsS */
4964 case 0xa5:
4965 if ((b & 1) == 0)
4966 ot = OT_BYTE;
4967 else
4968 ot = dflag + OT_WORD;
4969
4970 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4971 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4972 } else {
4973 gen_movs(s, ot);
4974 }
4975 break;
4976
4977 case 0xaa: /* stosS */
4978 case 0xab:
4979 if ((b & 1) == 0)
4980 ot = OT_BYTE;
4981 else
4982 ot = dflag + OT_WORD;
4983
4984 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4985 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4986 } else {
4987 gen_stos(s, ot);
4988 }
4989 break;
4990 case 0xac: /* lodsS */
4991 case 0xad:
4992 if ((b & 1) == 0)
4993 ot = OT_BYTE;
4994 else
4995 ot = dflag + OT_WORD;
4996 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4997 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4998 } else {
4999 gen_lods(s, ot);
5000 }
5001 break;
5002 case 0xae: /* scasS */
5003 case 0xaf:
5004 if ((b & 1) == 0)
5005 ot = OT_BYTE;
5006 else
5007 ot = dflag + OT_WORD;
5008 if (prefixes & PREFIX_REPNZ) {
5009 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5010 } else if (prefixes & PREFIX_REPZ) {
5011 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5012 } else {
5013 gen_scas(s, ot);
5014 s->cc_op = CC_OP_SUBB + ot;
5015 }
5016 break;
5017
5018 case 0xa6: /* cmpsS */
5019 case 0xa7:
5020 if ((b & 1) == 0)
5021 ot = OT_BYTE;
5022 else
5023 ot = dflag + OT_WORD;
5024 if (prefixes & PREFIX_REPNZ) {
5025 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5026 } else if (prefixes & PREFIX_REPZ) {
5027 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5028 } else {
5029 gen_cmps(s, ot);
5030 s->cc_op = CC_OP_SUBB + ot;
5031 }
5032 break;
5033 case 0x6c: /* insS */
5034 case 0x6d:
5035 if ((b & 1) == 0)
5036 ot = OT_BYTE;
5037 else
5038 ot = dflag ? OT_LONG : OT_WORD;
5039 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5040 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5041 gen_op_andl_T0_ffff();
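/* build the SVM IOIO exit info word (here and for the other
   I/O insns below); per the AMD SVM spec: bit 0 = direction
   (IN), bit 2 = string op, bit 3 = rep prefix, bits 4-6 =
   access size, bits 7-9 = address size */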
5042 if (gen_svm_check_io(s, pc_start,
5043 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
5044 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
5045 break;
5046 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5047 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5048 } else {
5049 gen_ins(s, ot);
5050 }
5051 break;
5052 case 0x6e: /* outsS */
5053 case 0x6f:
5054 if ((b & 1) == 0)
5055 ot = OT_BYTE;
5056 else
5057 ot = dflag ? OT_LONG : OT_WORD;
5058 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5059 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5060 gen_op_andl_T0_ffff();
5061 if (gen_svm_check_io(s, pc_start,
5062 (1 << (4+ot)) | svm_is_rep(prefixes) |
5063 4 | (1 << (7+s->aflag))))
5064 break;
5065 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5066 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5067 } else {
5068 gen_outs(s, ot);
5069 }
5070 break;
5071
5072 /************************/
5073 /* port I/O */
5074
5075 case 0xe4:
5076 case 0xe5:
5077 if ((b & 1) == 0)
5078 ot = OT_BYTE;
5079 else
5080 ot = dflag ? OT_LONG : OT_WORD;
5081 val = ldub_code(s->pc++);
5082 gen_op_movl_T0_im(val);
5083 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5084 if (gen_svm_check_io(s, pc_start,
5085 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5086 (1 << (4+ot))))
5087 break;
5088 gen_op_in[ot]();
5089 gen_op_mov_reg_T1(ot, R_EAX);
5090 break;
5091 case 0xe6:
5092 case 0xe7:
5093 if ((b & 1) == 0)
5094 ot = OT_BYTE;
5095 else
5096 ot = dflag ? OT_LONG : OT_WORD;
5097 val = ldub_code(s->pc++);
5098 gen_op_movl_T0_im(val);
5099 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5100 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5101 (1 << (4+ot))))
5102 break;
5103 gen_op_mov_TN_reg(ot, 1, R_EAX);
5104 gen_op_out[ot]();
5105 break;
5106 case 0xec:
5107 case 0xed:
5108 if ((b & 1) == 0)
5109 ot = OT_BYTE;
5110 else
5111 ot = dflag ? OT_LONG : OT_WORD;
5112 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5113 gen_op_andl_T0_ffff();
5114 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5115 if (gen_svm_check_io(s, pc_start,
5116 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5117 (1 << (4+ot))))
5118 break;
5119 gen_op_in[ot]();
5120 gen_op_mov_reg_T1(ot, R_EAX);
5121 break;
5122 case 0xee:
5123 case 0xef:
5124 if ((b & 1) == 0)
5125 ot = OT_BYTE;
5126 else
5127 ot = dflag ? OT_LONG : OT_WORD;
5128 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5129 gen_op_andl_T0_ffff();
5130 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5131 if (gen_svm_check_io(s, pc_start,
5132 svm_is_rep(prefixes) | (1 << (4+ot))))
5133 break;
5134 gen_op_mov_TN_reg(ot, 1, R_EAX);
5135 gen_op_out[ot]();
5136 break;
5137
5138 /************************/
5139 /* control */
5140 case 0xc2: /* ret im */
5141 val = ldsw_code(s->pc);
5142 s->pc += 2;
5143 gen_pop_T0(s);
5144 if (CODE64(s) && s->dflag)
5145 s->dflag = 2;
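/* the operand size in bytes is 2 << dflag: 2, 4 or 8 */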
5146 gen_stack_update(s, val + (2 << s->dflag));
5147 if (s->dflag == 0)
5148 gen_op_andl_T0_ffff();
5149 gen_op_jmp_T0();
5150 gen_eob(s);
5151 break;
5152 case 0xc3: /* ret */
5153 gen_pop_T0(s);
5154 gen_pop_update(s);
5155 if (s->dflag == 0)
5156 gen_op_andl_T0_ffff();
5157 gen_op_jmp_T0();
5158 gen_eob(s);
5159 break;
5160 case 0xca: /* lret im */
5161 val = ldsw_code(s->pc);
5162 s->pc += 2;
5163 do_lret:
5164 if (s->pe && !s->vm86) {
5165 if (s->cc_op != CC_OP_DYNAMIC)
5166 gen_op_set_cc_op(s->cc_op);
5167 gen_jmp_im(pc_start - s->cs_base);
5168 gen_op_lret_protected(s->dflag, val);
5169 } else {
5170 gen_stack_A0(s);
5171 /* pop offset */
5172 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5173 if (s->dflag == 0)
5174 gen_op_andl_T0_ffff();
5175 /* NOTE: keeping EIP updated is not a problem in case of
5176 exception */
5177 gen_op_jmp_T0();
5178 /* pop selector */
5179 gen_op_addl_A0_im(2 << s->dflag);
5180 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5181 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5182 /* add stack offset */
5183 gen_stack_update(s, val + (4 << s->dflag));
5184 }
5185 gen_eob(s);
5186 break;
5187 case 0xcb: /* lret */
5188 val = 0;
5189 goto do_lret;
5190 case 0xcf: /* iret */
5191 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5192 break;
5193 if (!s->pe) {
5194 /* real mode */
5195 gen_op_iret_real(s->dflag);
5196 s->cc_op = CC_OP_EFLAGS;
5197 } else if (s->vm86) {
5198 if (s->iopl != 3) {
5199 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5200 } else {
5201 gen_op_iret_real(s->dflag);
5202 s->cc_op = CC_OP_EFLAGS;
5203 }
5204 } else {
5205 if (s->cc_op != CC_OP_DYNAMIC)
5206 gen_op_set_cc_op(s->cc_op);
5207 gen_jmp_im(pc_start - s->cs_base);
5208 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5209 s->cc_op = CC_OP_EFLAGS;
5210 }
5211 gen_eob(s);
5212 break;
5213 case 0xe8: /* call im */
5214 {
5215 if (dflag)
5216 tval = (int32_t)insn_get(s, OT_LONG);
5217 else
5218 tval = (int16_t)insn_get(s, OT_WORD);
5219 next_eip = s->pc - s->cs_base;
5220 tval += next_eip;
5221 if (s->dflag == 0)
5222 tval &= 0xffff;
5223 gen_movtl_T0_im(next_eip);
5224 gen_push_T0(s);
5225 gen_jmp(s, tval);
5226 }
5227 break;
5228 case 0x9a: /* lcall im */
5229 {
5230 unsigned int selector, offset;
5231
5232 if (CODE64(s))
5233 goto illegal_op;
5234 ot = dflag ? OT_LONG : OT_WORD;
5235 offset = insn_get(s, ot);
5236 selector = insn_get(s, OT_WORD);
5237
5238 gen_op_movl_T0_im(selector);
5239 gen_op_movl_T1_imu(offset);
5240 }
5241 goto do_lcall;
5242 case 0xe9: /* jmp im */
5243 if (dflag)
5244 tval = (int32_t)insn_get(s, OT_LONG);
5245 else
5246 tval = (int16_t)insn_get(s, OT_WORD);
5247 tval += s->pc - s->cs_base;
5248 if (s->dflag == 0)
5249 tval &= 0xffff;
5250 gen_jmp(s, tval);
5251 break;
5252 case 0xea: /* ljmp im */
5253 {
5254 unsigned int selector, offset;
5255
5256 if (CODE64(s))
5257 goto illegal_op;
5258 ot = dflag ? OT_LONG : OT_WORD;
5259 offset = insn_get(s, ot);
5260 selector = insn_get(s, OT_WORD);
5261
5262 gen_op_movl_T0_im(selector);
5263 gen_op_movl_T1_imu(offset);
5264 }
5265 goto do_ljmp;
5266 case 0xeb: /* jmp Jb */
5267 tval = (int8_t)insn_get(s, OT_BYTE);
5268 tval += s->pc - s->cs_base;
5269 if (s->dflag == 0)
5270 tval &= 0xffff;
5271 gen_jmp(s, tval);
5272 break;
5273 case 0x70 ... 0x7f: /* jcc Jb */
5274 tval = (int8_t)insn_get(s, OT_BYTE);
5275 goto do_jcc;
5276 case 0x180 ... 0x18f: /* jcc Jv */
5277 if (dflag) {
5278 tval = (int32_t)insn_get(s, OT_LONG);
5279 } else {
5280 tval = (int16_t)insn_get(s, OT_WORD);
5281 }
5282 do_jcc:
5283 next_eip = s->pc - s->cs_base;
5284 tval += next_eip;
5285 if (s->dflag == 0)
5286 tval &= 0xffff;
5287 gen_jcc(s, b, tval, next_eip);
5288 break;
5289
5290 case 0x190 ... 0x19f: /* setcc Gv */
5291 modrm = ldub_code(s->pc++);
5292 gen_setcc(s, b);
5293 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5294 break;
5295 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5296 ot = dflag + OT_WORD;
5297 modrm = ldub_code(s->pc++);
5298 reg = ((modrm >> 3) & 7) | rex_r;
5299 mod = (modrm >> 6) & 3;
5300 gen_setcc(s, b);
5301 if (mod != 3) {
5302 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5303 gen_op_ld_T1_A0(ot + s->mem_index);
5304 } else {
5305 rm = (modrm & 7) | REX_B(s);
5306 gen_op_mov_TN_reg(ot, 1, rm);
5307 }
5308 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5309 break;
5310
5311 /************************/
5312 /* flags */
5313 case 0x9c: /* pushf */
5314 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5315 break;
5316 if (s->vm86 && s->iopl != 3) {
5317 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5318 } else {
5319 if (s->cc_op != CC_OP_DYNAMIC)
5320 gen_op_set_cc_op(s->cc_op);
5321 gen_op_movl_T0_eflags();
5322 gen_push_T0(s);
5323 }
5324 break;
5325 case 0x9d: /* popf */
5326 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5327 break;
5328 if (s->vm86 && s->iopl != 3) {
5329 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5330 } else {
5331 gen_pop_T0(s);
5332 if (s->cpl == 0) {
5333 if (s->dflag) {
5334 gen_op_movl_eflags_T0_cpl0();
5335 } else {
5336 gen_op_movw_eflags_T0_cpl0();
5337 }
5338 } else {
5339 if (s->cpl <= s->iopl) {
5340 if (s->dflag) {
5341 gen_op_movl_eflags_T0_io();
5342 } else {
5343 gen_op_movw_eflags_T0_io();
5344 }
5345 } else {
5346 if (s->dflag) {
5347 gen_op_movl_eflags_T0();
5348 } else {
5349 gen_op_movw_eflags_T0();
5350 }
5351 }
5352 }
5353 gen_pop_update(s);
5354 s->cc_op = CC_OP_EFLAGS;
5355 /* abort translation because TF flag may change */
5356 gen_jmp_im(s->pc - s->cs_base);
5357 gen_eob(s);
5358 }
5359 break;
5360 case 0x9e: /* sahf */
5361 if (CODE64(s))
5362 goto illegal_op;
5363 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5364 if (s->cc_op != CC_OP_DYNAMIC)
5365 gen_op_set_cc_op(s->cc_op);
5366 gen_op_movb_eflags_T0();
5367 s->cc_op = CC_OP_EFLAGS;
5368 break;
5369 case 0x9f: /* lahf */
5370 if (CODE64(s))
5371 goto illegal_op;
5372 if (s->cc_op != CC_OP_DYNAMIC)
5373 gen_op_set_cc_op(s->cc_op);
5374 gen_op_movl_T0_eflags();
5375 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5376 break;
5377 case 0xf5: /* cmc */
5378 if (s->cc_op != CC_OP_DYNAMIC)
5379 gen_op_set_cc_op(s->cc_op);
5380 gen_op_cmc();
5381 s->cc_op = CC_OP_EFLAGS;
5382 break;
5383 case 0xf8: /* clc */
5384 if (s->cc_op != CC_OP_DYNAMIC)
5385 gen_op_set_cc_op(s->cc_op);
5386 gen_op_clc();
5387 s->cc_op = CC_OP_EFLAGS;
5388 break;
5389 case 0xf9: /* stc */
5390 if (s->cc_op != CC_OP_DYNAMIC)
5391 gen_op_set_cc_op(s->cc_op);
5392 gen_op_stc();
5393 s->cc_op = CC_OP_EFLAGS;
5394 break;
5395 case 0xfc: /* cld */
5396 gen_op_cld();
5397 break;
5398 case 0xfd: /* std */
5399 gen_op_std();
5400 break;
5401
5402 /************************/
5403 /* bit operations */
5404 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5405 ot = dflag + OT_WORD;
5406 modrm = ldub_code(s->pc++);
5407 op = (modrm >> 3) & 7;
5408 mod = (modrm >> 6) & 3;
5409 rm = (modrm & 7) | REX_B(s);
5410 if (mod != 3) {
5411 s->rip_offset = 1;
5412 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5413 gen_op_ld_T0_A0(ot + s->mem_index);
5414 } else {
5415 gen_op_mov_TN_reg(ot, 0, rm);
5416 }
5417 /* load shift */
5418 val = ldub_code(s->pc++);
5419 gen_op_movl_T1_im(val);
5420 if (op < 4)
5421 goto illegal_op;
5422 op -= 4;
5423 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5424 s->cc_op = CC_OP_SARB + ot;
5425 if (op != 0) {
5426 if (mod != 3)
5427 gen_op_st_T0_A0(ot + s->mem_index);
5428 else
5429 gen_op_mov_reg_T0(ot, rm);
5430 gen_op_update_bt_cc();
5431 }
5432 break;
5433 case 0x1a3: /* bt Gv, Ev */
5434 op = 0;
5435 goto do_btx;
5436 case 0x1ab: /* bts */
5437 op = 1;
5438 goto do_btx;
5439 case 0x1b3: /* btr */
5440 op = 2;
5441 goto do_btx;
5442 case 0x1bb: /* btc */
5443 op = 3;
5444 do_btx:
5445 ot = dflag + OT_WORD;
5446 modrm = ldub_code(s->pc++);
5447 reg = ((modrm >> 3) & 7) | rex_r;
5448 mod = (modrm >> 6) & 3;
5449 rm = (modrm & 7) | REX_B(s);
5450 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5451 if (mod != 3) {
5452 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5453 /* specific case: we need to add a displacement */
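/* e.g. bt dword [mem], 100: A0 += (100 >> 5) * 4 and bit
   100 & 31 of that dword is tested, since the bit offset
   may reach beyond the addressed operand */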
5454 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5455 gen_op_ld_T0_A0(ot + s->mem_index);
5456 } else {
5457 gen_op_mov_TN_reg(ot, 0, rm);
5458 }
5459 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5460 s->cc_op = CC_OP_SARB + ot;
5461 if (op != 0) {
5462 if (mod != 3)
5463 gen_op_st_T0_A0(ot + s->mem_index);
5464 else
5465 gen_op_mov_reg_T0(ot, rm);
5466 gen_op_update_bt_cc();
5467 }
5468 break;
5469 case 0x1bc: /* bsf */
5470 case 0x1bd: /* bsr */
5471 ot = dflag + OT_WORD;
5472 modrm = ldub_code(s->pc++);
5473 reg = ((modrm >> 3) & 7) | rex_r;
5474 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5475 /* NOTE: in order to handle the 0 case, we must load the
5476 result. It could be optimized with a generated jump */
5477 gen_op_mov_TN_reg(ot, 1, reg);
5478 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5479 gen_op_mov_reg_T1(ot, reg);
5480 s->cc_op = CC_OP_LOGICB + ot;
5481 break;
5482 /************************/
5483 /* bcd */
5484 case 0x27: /* daa */
5485 if (CODE64(s))
5486 goto illegal_op;
5487 if (s->cc_op != CC_OP_DYNAMIC)
5488 gen_op_set_cc_op(s->cc_op);
5489 gen_op_daa();
5490 s->cc_op = CC_OP_EFLAGS;
5491 break;
5492 case 0x2f: /* das */
5493 if (CODE64(s))
5494 goto illegal_op;
5495 if (s->cc_op != CC_OP_DYNAMIC)
5496 gen_op_set_cc_op(s->cc_op);
5497 gen_op_das();
5498 s->cc_op = CC_OP_EFLAGS;
5499 break;
5500 case 0x37: /* aaa */
5501 if (CODE64(s))
5502 goto illegal_op;
5503 if (s->cc_op != CC_OP_DYNAMIC)
5504 gen_op_set_cc_op(s->cc_op);
5505 gen_op_aaa();
5506 s->cc_op = CC_OP_EFLAGS;
5507 break;
5508 case 0x3f: /* aas */
5509 if (CODE64(s))
5510 goto illegal_op;
5511 if (s->cc_op != CC_OP_DYNAMIC)
5512 gen_op_set_cc_op(s->cc_op);
5513 gen_op_aas();
5514 s->cc_op = CC_OP_EFLAGS;
5515 break;
5516 case 0xd4: /* aam */
5517 if (CODE64(s))
5518 goto illegal_op;
5519 val = ldub_code(s->pc++);
5520 if (val == 0) {
5521 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5522 } else {
5523 gen_op_aam(val);
5524 s->cc_op = CC_OP_LOGICB;
5525 }
5526 break;
5527 case 0xd5: /* aad */
5528 if (CODE64(s))
5529 goto illegal_op;
5530 val = ldub_code(s->pc++);
5531 gen_op_aad(val);
5532 s->cc_op = CC_OP_LOGICB;
5533 break;
5534 /************************/
5535 /* misc */
5536 case 0x90: /* nop */
5537 /* XXX: xchg + rex handling */
5538 /* XXX: correct lock test for all insn */
5539 if (prefixes & PREFIX_LOCK)
5540 goto illegal_op;
5541 if (prefixes & PREFIX_REPZ) {
5542 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5543 }
5544 break;
5545 case 0x9b: /* fwait */
5546 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5547 (HF_MP_MASK | HF_TS_MASK)) {
5548 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5549 } else {
5550 if (s->cc_op != CC_OP_DYNAMIC)
5551 gen_op_set_cc_op(s->cc_op);
5552 gen_jmp_im(pc_start - s->cs_base);
5553 gen_op_fwait();
5554 }
5555 break;
5556 case 0xcc: /* int3 */
5557 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5558 break;
5559 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5560 break;
5561 case 0xcd: /* int N */
5562 val = ldub_code(s->pc++);
5563 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5564 break;
5565 if (s->vm86 && s->iopl != 3) {
5566 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5567 } else {
5568 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5569 }
5570 break;
5571 case 0xce: /* into */
5572 if (CODE64(s))
5573 goto illegal_op;
5574 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5575 break;
5576 if (s->cc_op != CC_OP_DYNAMIC)
5577 gen_op_set_cc_op(s->cc_op);
5578 gen_jmp_im(pc_start - s->cs_base);
5579 gen_op_into(s->pc - pc_start);
5580 break;
5581 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5582 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5583 break;
5584 #if 1
5585 gen_debug(s, pc_start - s->cs_base);
5586 #else
5587 /* start debug */
5588 tb_flush(cpu_single_env);
5589 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5590 #endif
5591 break;
5592 case 0xfa: /* cli */
5593 if (!s->vm86) {
5594 if (s->cpl <= s->iopl) {
5595 gen_op_cli();
5596 } else {
5597 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5598 }
5599 } else {
5600 if (s->iopl == 3) {
5601 gen_op_cli();
5602 } else {
5603 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5604 }
5605 }
5606 break;
5607 case 0xfb: /* sti */
5608 if (!s->vm86) {
5609 if (s->cpl <= s->iopl) {
5610 gen_sti:
5611 gen_op_sti();
5612 /* interrupts are enabled only after the insn following sti */
5613 /* If several instructions disable interrupts, only the
5614 _first_ does it */
5615 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5616 gen_op_set_inhibit_irq();
5617 /* give a chance to handle pending irqs */
5618 gen_jmp_im(s->pc - s->cs_base);
5619 gen_eob(s);
5620 } else {
5621 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5622 }
5623 } else {
5624 if (s->iopl == 3) {
5625 goto gen_sti;
5626 } else {
5627 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5628 }
5629 }
5630 break;
5631 case 0x62: /* bound */
5632 if (CODE64(s))
5633 goto illegal_op;
5634 ot = dflag ? OT_LONG : OT_WORD;
5635 modrm = ldub_code(s->pc++);
5636 reg = (modrm >> 3) & 7;
5637 mod = (modrm >> 6) & 3;
5638 if (mod == 3)
5639 goto illegal_op;
5640 gen_op_mov_TN_reg(ot, 0, reg);
5641 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5642 gen_jmp_im(pc_start - s->cs_base);
5643 if (ot == OT_WORD)
5644 gen_op_boundw();
5645 else
5646 gen_op_boundl();
5647 break;
5648 case 0x1c8 ... 0x1cf: /* bswap reg */
5649 reg = (b & 7) | REX_B(s);
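/* byte-reverse the register, e.g. 0x12345678 -> 0x78563412;
   only the 32 and 64 bit forms are architecturally defined */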
5650 #ifdef TARGET_X86_64
5651 if (dflag == 2) {
5652 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5653 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5654 gen_op_mov_reg_T0(OT_QUAD, reg);
5655 } else
5656 {
5657 TCGv tmp0;
5658 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5659
5660 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5661 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5662 tcg_gen_bswap_i32(tmp0, tmp0);
5663 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5664 gen_op_mov_reg_T0(OT_LONG, reg);
5665 }
5666 #else
5667 {
5668 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5669 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5670 gen_op_mov_reg_T0(OT_LONG, reg);
5671 }
5672 #endif
5673 break;
5674 case 0xd6: /* salc */
5675 if (CODE64(s))
5676 goto illegal_op;
5677 if (s->cc_op != CC_OP_DYNAMIC)
5678 gen_op_set_cc_op(s->cc_op);
5679 gen_op_salc();
5680 break;
5681 case 0xe0: /* loopnz */
5682 case 0xe1: /* loopz */
5683 if (s->cc_op != CC_OP_DYNAMIC)
5684 gen_op_set_cc_op(s->cc_op);
5685 /* FALL THRU */
5686 case 0xe2: /* loop */
5687 case 0xe3: /* jecxz */
5688 {
5689 int l1, l2;
5690
5691 tval = (int8_t)insn_get(s, OT_BYTE);
5692 next_eip = s->pc - s->cs_base;
5693 tval += next_eip;
5694 if (s->dflag == 0)
5695 tval &= 0xffff;
5696
5697 l1 = gen_new_label();
5698 l2 = gen_new_label();
5699 b &= 3;
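/* b & 3: 0 = loopnz, 1 = loopz, 2 = loop, 3 = jecxz */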
5700 if (b == 3) {
5701 gen_op_jz_ecx[s->aflag](l1);
5702 } else {
5703 gen_op_dec_ECX[s->aflag]();
5704 if (b <= 1)
5705 gen_op_mov_T0_cc();
5706 gen_op_loop[s->aflag][b](l1);
5707 }
5708
5709 gen_jmp_im(next_eip);
5710 gen_op_jmp_label(l2);
5711 gen_set_label(l1);
5712 gen_jmp_im(tval);
5713 gen_set_label(l2);
5714 gen_eob(s);
5715 }
5716 break;
5717 case 0x130: /* wrmsr */
5718 case 0x132: /* rdmsr */
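/* ECX selects the MSR and EDX:EAX carries the value; opcode
   bit 1 (0x30 vs 0x32) distinguishes wrmsr from rdmsr */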
5719 if (s->cpl != 0) {
5720 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5721 } else {
5722 int retval = 0;
5723 if (b & 2) {
5724 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5725 gen_op_rdmsr();
5726 } else {
5727 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5728 gen_op_wrmsr();
5729 }
5730 if(retval)
5731 gen_eob(s);
5732 }
5733 break;
5734 case 0x131: /* rdtsc */
5735 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5736 break;
5737 gen_jmp_im(pc_start - s->cs_base);
5738 gen_op_rdtsc();
5739 break;
5740 case 0x133: /* rdpmc */
5741 gen_jmp_im(pc_start - s->cs_base);
5742 gen_op_rdpmc();
5743 break;
5744 case 0x134: /* sysenter */
5745 if (CODE64(s))
5746 goto illegal_op;
5747 if (!s->pe) {
5748 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5749 } else {
5750 if (s->cc_op != CC_OP_DYNAMIC) {
5751 gen_op_set_cc_op(s->cc_op);
5752 s->cc_op = CC_OP_DYNAMIC;
5753 }
5754 gen_jmp_im(pc_start - s->cs_base);
5755 gen_op_sysenter();
5756 gen_eob(s);
5757 }
5758 break;
5759 case 0x135: /* sysexit */
5760 if (CODE64(s))
5761 goto illegal_op;
5762 if (!s->pe) {
5763 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5764 } else {
5765 if (s->cc_op != CC_OP_DYNAMIC) {
5766 gen_op_set_cc_op(s->cc_op);
5767 s->cc_op = CC_OP_DYNAMIC;
5768 }
5769 gen_jmp_im(pc_start - s->cs_base);
5770 gen_op_sysexit();
5771 gen_eob(s);
5772 }
5773 break;
5774 #ifdef TARGET_X86_64
5775 case 0x105: /* syscall */
5776 /* XXX: is it usable in real mode? */
5777 if (s->cc_op != CC_OP_DYNAMIC) {
5778 gen_op_set_cc_op(s->cc_op);
5779 s->cc_op = CC_OP_DYNAMIC;
5780 }
5781 gen_jmp_im(pc_start - s->cs_base);
5782 gen_op_syscall(s->pc - pc_start);
5783 gen_eob(s);
5784 break;
5785 case 0x107: /* sysret */
5786 if (!s->pe) {
5787 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5788 } else {
5789 if (s->cc_op != CC_OP_DYNAMIC) {
5790 gen_op_set_cc_op(s->cc_op);
5791 s->cc_op = CC_OP_DYNAMIC;
5792 }
5793 gen_jmp_im(pc_start - s->cs_base);
5794 gen_op_sysret(s->dflag);
5795 /* condition codes are modified only in long mode */
5796 if (s->lma)
5797 s->cc_op = CC_OP_EFLAGS;
5798 gen_eob(s);
5799 }
5800 break;
5801 #endif
5802 case 0x1a2: /* cpuid */
5803 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5804 break;
5805 gen_op_cpuid();
5806 break;
5807 case 0xf4: /* hlt */
5808 if (s->cpl != 0) {
5809 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5810 } else {
5811 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5812 break;
5813 if (s->cc_op != CC_OP_DYNAMIC)
5814 gen_op_set_cc_op(s->cc_op);
5815 gen_jmp_im(s->pc - s->cs_base);
5816 gen_op_hlt();
5817 s->is_jmp = 3;
5818 }
5819 break;
5820 case 0x100:
5821 modrm = ldub_code(s->pc++);
5822 mod = (modrm >> 6) & 3;
5823 op = (modrm >> 3) & 7;
5824 switch(op) {
5825 case 0: /* sldt */
5826 if (!s->pe || s->vm86)
5827 goto illegal_op;
5828 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5829 break;
5830 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5831 ot = OT_WORD;
5832 if (mod == 3)
5833 ot += s->dflag;
5834 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5835 break;
5836 case 2: /* lldt */
5837 if (!s->pe || s->vm86)
5838 goto illegal_op;
5839 if (s->cpl != 0) {
5840 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5841 } else {
5842 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5843 break;
5844 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5845 gen_jmp_im(pc_start - s->cs_base);
5846 gen_op_lldt_T0();
5847 }
5848 break;
5849 case 1: /* str */
5850 if (!s->pe || s->vm86)
5851 goto illegal_op;
5852 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5853 break;
5854 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5855 ot = OT_WORD;
5856 if (mod == 3)
5857 ot += s->dflag;
5858 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5859 break;
5860 case 3: /* ltr */
5861 if (!s->pe || s->vm86)
5862 goto illegal_op;
5863 if (s->cpl != 0) {
5864 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5865 } else {
5866 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5867 break;
5868 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5869 gen_jmp_im(pc_start - s->cs_base);
5870 gen_op_ltr_T0();
5871 }
5872 break;
5873 case 4: /* verr */
5874 case 5: /* verw */
5875 if (!s->pe || s->vm86)
5876 goto illegal_op;
5877 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5878 if (s->cc_op != CC_OP_DYNAMIC)
5879 gen_op_set_cc_op(s->cc_op);
5880 if (op == 4)
5881 gen_op_verr();
5882 else
5883 gen_op_verw();
5884 s->cc_op = CC_OP_EFLAGS;
5885 break;
5886 default:
5887 goto illegal_op;
5888 }
5889 break;
5890 case 0x101:
5891 modrm = ldub_code(s->pc++);
5892 mod = (modrm >> 6) & 3;
5893 op = (modrm >> 3) & 7;
5894 rm = modrm & 7;
5895 switch(op) {
5896 case 0: /* sgdt */
5897 if (mod == 3)
5898 goto illegal_op;
5899 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5900 break;
5901 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
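/* the stored image is the 2 byte limit followed by the base;
   with a 16 bit operand size the base is masked to 24 bits */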
5902 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5903 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5904 gen_add_A0_im(s, 2);
5905 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5906 if (!s->dflag)
5907 gen_op_andl_T0_im(0xffffff);
5908 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5909 break;
5910 case 1:
5911 if (mod == 3) {
5912 switch (rm) {
5913 case 0: /* monitor */
5914 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5915 s->cpl != 0)
5916 goto illegal_op;
5917 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5918 break;
5919 gen_jmp_im(pc_start - s->cs_base);
5920 #ifdef TARGET_X86_64
5921 if (s->aflag == 2) {
5922 gen_op_movq_A0_reg(R_EBX);
5923 gen_op_addq_A0_AL();
5924 } else
5925 #endif
5926 {
5927 gen_op_movl_A0_reg(R_EBX);
5928 gen_op_addl_A0_AL();
5929 if (s->aflag == 0)
5930 gen_op_andl_A0_ffff();
5931 }
5932 gen_add_A0_ds_seg(s);
5933 gen_op_monitor();
5934 break;
5935 case 1: /* mwait */
5936 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5937 s->cpl != 0)
5938 goto illegal_op;
5939 if (s->cc_op != CC_OP_DYNAMIC) {
5940 gen_op_set_cc_op(s->cc_op);
5941 s->cc_op = CC_OP_DYNAMIC;
5942 }
5943 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5944 break;
5945 gen_jmp_im(s->pc - s->cs_base);
5946 gen_op_mwait();
5947 gen_eob(s);
5948 break;
5949 default:
5950 goto illegal_op;
5951 }
5952 } else { /* sidt */
5953 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5954 break;
5955 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5956 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5957 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5958 gen_add_A0_im(s, 2);
5959 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5960 if (!s->dflag)
5961 gen_op_andl_T0_im(0xffffff);
5962 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5963 }
5964 break;
5965 case 2: /* lgdt */
5966 case 3: /* lidt */
5967 if (mod == 3) {
5968 switch(rm) {
5969 case 0: /* VMRUN */
5970 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5971 break;
5972 if (s->cc_op != CC_OP_DYNAMIC)
5973 gen_op_set_cc_op(s->cc_op);
5974 gen_jmp_im(s->pc - s->cs_base);
5975 gen_op_vmrun();
5976 s->cc_op = CC_OP_EFLAGS;
5977 gen_eob(s);
5978 break;
5979 case 1: /* VMMCALL */
5980 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5981 break;
5982 /* FIXME: cause #UD if hflags & SVM */
5983 gen_op_vmmcall();
5984 break;
5985 case 2: /* VMLOAD */
5986 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5987 break;
5988 gen_op_vmload();
5989 break;
5990 case 3: /* VMSAVE */
5991 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5992 break;
5993 gen_op_vmsave();
5994 break;
5995 case 4: /* STGI */
5996 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5997 break;
5998 gen_op_stgi();
5999 break;
6000 case 5: /* CLGI */
6001 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6002 break;
6003 gen_op_clgi();
6004 break;
6005 case 6: /* SKINIT */
6006 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6007 break;
6008 gen_op_skinit();
6009 break;
6010 case 7: /* INVLPGA */
6011 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6012 break;
6013 gen_op_invlpga();
6014 break;
6015 default:
6016 goto illegal_op;
6017 }
6018 } else if (s->cpl != 0) {
6019 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6020 } else {
6021 if (gen_svm_check_intercept(s, pc_start,
6022 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6023 break;
6024 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6025 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6026 gen_add_A0_im(s, 2);
6027 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6028 if (!s->dflag)
6029 gen_op_andl_T0_im(0xffffff);
6030 if (op == 2) {
6031 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6032 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6033 } else {
6034 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6035 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6036 }
6037 }
6038 break;
6039 case 4: /* smsw */
6040 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6041 break;
6042 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6043 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6044 break;
6045 case 6: /* lmsw */
6046 if (s->cpl != 0) {
6047 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6048 } else {
6049 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6050 break;
6051 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6052 gen_op_lmsw_T0();
6053 gen_jmp_im(s->pc - s->cs_base);
6054 gen_eob(s);
6055 }
6056 break;
6057 case 7: /* invlpg */
6058 if (s->cpl != 0) {
6059 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6060 } else {
6061 if (mod == 3) {
6062 #ifdef TARGET_X86_64
6063 if (CODE64(s) && rm == 0) {
6064 /* swapgs */
6065 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6066 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6067 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6068 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6069 } else
6070 #endif
6071 {
6072 goto illegal_op;
6073 }
6074 } else {
6075 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6076 break;
6077 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6078 gen_op_invlpg_A0();
6079 gen_jmp_im(s->pc - s->cs_base);
6080 gen_eob(s);
6081 }
6082 }
6083 break;
6084 default:
6085 goto illegal_op;
6086 }
6087 break;
6088 case 0x108: /* invd */
6089 case 0x109: /* wbinvd */
6090 if (s->cpl != 0) {
6091 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6092 } else {
6093 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6094 break;
6095 /* nothing to do */
6096 }
6097 break;
6098 case 0x63: /* arpl or movslS (x86_64) */
6099 #ifdef TARGET_X86_64
6100 if (CODE64(s)) {
6101 int d_ot;
6102 /* d_ot is the size of destination */
6103 d_ot = dflag + OT_WORD;
6104
6105 modrm = ldub_code(s->pc++);
6106 reg = ((modrm >> 3) & 7) | rex_r;
6107 mod = (modrm >> 6) & 3;
6108 rm = (modrm & 7) | REX_B(s);
6109
6110 if (mod == 3) {
6111 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6112 /* sign extend */
6113 if (d_ot == OT_QUAD)
6114 gen_op_movslq_T0_T0();
6115 gen_op_mov_reg_T0(d_ot, reg);
6116 } else {
6117 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6118 if (d_ot == OT_QUAD) {
6119 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6120 } else {
6121 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6122 }
6123 gen_op_mov_reg_T0(d_ot, reg);
6124 }
6125 } else
6126 #endif
6127 {
6128 if (!s->pe || s->vm86)
6129 goto illegal_op;
6130 ot = dflag ? OT_LONG : OT_WORD;
6131 modrm = ldub_code(s->pc++);
6132 reg = (modrm >> 3) & 7;
6133 mod = (modrm >> 6) & 3;
6134 rm = modrm & 7;
6135 if (mod != 3) {
6136 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6137 gen_op_ld_T0_A0(ot + s->mem_index);
6138 } else {
6139 gen_op_mov_TN_reg(ot, 0, rm);
6140 }
6141 if (s->cc_op != CC_OP_DYNAMIC)
6142 gen_op_set_cc_op(s->cc_op);
6143 gen_op_arpl();
6144 s->cc_op = CC_OP_EFLAGS;
6145 if (mod != 3) {
6146 gen_op_st_T0_A0(ot + s->mem_index);
6147 } else {
6148 gen_op_mov_reg_T0(ot, rm);
6149 }
6150 gen_op_arpl_update();
6151 }
6152 break;
6153 case 0x102: /* lar */
6154 case 0x103: /* lsl */
6155 if (!s->pe || s->vm86)
6156 goto illegal_op;
6157 ot = dflag ? OT_LONG : OT_WORD;
6158 modrm = ldub_code(s->pc++);
6159 reg = ((modrm >> 3) & 7) | rex_r;
6160 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6161 gen_op_mov_TN_reg(ot, 1, reg);
6162 if (s->cc_op != CC_OP_DYNAMIC)
6163 gen_op_set_cc_op(s->cc_op);
6164 if (b == 0x102)
6165 gen_op_lar();
6166 else
6167 gen_op_lsl();
6168 s->cc_op = CC_OP_EFLAGS;
6169 gen_op_mov_reg_T1(ot, reg);
6170 break;
6171 case 0x118:
6172 modrm = ldub_code(s->pc++);
6173 mod = (modrm >> 6) & 3;
6174 op = (modrm >> 3) & 7;
6175 switch(op) {
6176 case 0: /* prefetchnta */
6177 case 1: /* prefetcht0 */
6178 case 2: /* prefetcht1 */
6179 case 3: /* prefetcht2 */
6180 if (mod == 3)
6181 goto illegal_op;
6182 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6183 /* nothing more to do */
6184 break;
6185 default: /* nop (multi byte) */
6186 gen_nop_modrm(s, modrm);
6187 break;
6188 }
6189 break;
6190 case 0x119 ... 0x11f: /* nop (multi byte) */
6191 modrm = ldub_code(s->pc++);
6192 gen_nop_modrm(s, modrm);
6193 break;
6194 case 0x120: /* mov reg, crN */
6195 case 0x122: /* mov crN, reg */
6196 if (s->cpl != 0) {
6197 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6198 } else {
6199 modrm = ldub_code(s->pc++);
6200 if ((modrm & 0xc0) != 0xc0)
6201 goto illegal_op;
6202 rm = (modrm & 7) | REX_B(s);
6203 reg = ((modrm >> 3) & 7) | rex_r;
6204 if (CODE64(s))
6205 ot = OT_QUAD;
6206 else
6207 ot = OT_LONG;
6208 switch(reg) {
6209 case 0:
6210 case 2:
6211 case 3:
6212 case 4:
6213 case 8:
6214 if (b & 2) {
6215 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6216 gen_op_mov_TN_reg(ot, 0, rm);
6217 gen_op_movl_crN_T0(reg);
6218 gen_jmp_im(s->pc - s->cs_base);
6219 gen_eob(s);
6220 } else {
6221 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6222 #if !defined(CONFIG_USER_ONLY)
6223 if (reg == 8)
6224 gen_op_movtl_T0_cr8();
6225 else
6226 #endif
6227 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6228 gen_op_mov_reg_T0(ot, rm);
6229 }
6230 break;
6231 default:
6232 goto illegal_op;
6233 }
6234 }
6235 break;
6236 case 0x121: /* mov reg, drN */
6237 case 0x123: /* mov drN, reg */
6238 if (s->cpl != 0) {
6239 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6240 } else {
6241 modrm = ldub_code(s->pc++);
6242 if ((modrm & 0xc0) != 0xc0)
6243 goto illegal_op;
6244 rm = (modrm & 7) | REX_B(s);
6245 reg = ((modrm >> 3) & 7) | rex_r;
6246 if (CODE64(s))
6247 ot = OT_QUAD;
6248 else
6249 ot = OT_LONG;
6250 /* XXX: do it dynamically with CR4.DE bit */
6251 if (reg == 4 || reg == 5 || reg >= 8)
6252 goto illegal_op;
6253 if (b & 2) {
6254 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6255 gen_op_mov_TN_reg(ot, 0, rm);
6256 gen_op_movl_drN_T0(reg);
6257 gen_jmp_im(s->pc - s->cs_base);
6258 gen_eob(s);
6259 } else {
6260 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6261 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6262 gen_op_mov_reg_T0(ot, rm);
6263 }
6264 }
6265 break;
6266 case 0x106: /* clts */
6267 if (s->cpl != 0) {
6268 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6269 } else {
6270 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6271 gen_op_clts();
6272 /* abort block because static cpu state changed */
6273 gen_jmp_im(s->pc - s->cs_base);
6274 gen_eob(s);
6275 }
6276 break;
6277 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6278 case 0x1c3: /* MOVNTI reg, mem */
6279 if (!(s->cpuid_features & CPUID_SSE2))
6280 goto illegal_op;
6281 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6282 modrm = ldub_code(s->pc++);
6283 mod = (modrm >> 6) & 3;
6284 if (mod == 3)
6285 goto illegal_op;
6286 reg = ((modrm >> 3) & 7) | rex_r;
6287 /* generate a generic store */
6288 gen_ldst_modrm(s, modrm, ot, reg, 1);
6289 break;
6290 case 0x1ae:
6291 modrm = ldub_code(s->pc++);
6292 mod = (modrm >> 6) & 3;
6293 op = (modrm >> 3) & 7;
6294 switch(op) {
6295 case 0: /* fxsave */
6296 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6297 (s->flags & HF_EM_MASK))
6298 goto illegal_op;
6299 if (s->flags & HF_TS_MASK) {
6300 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6301 break;
6302 }
6303 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6304 gen_op_fxsave_A0((s->dflag == 2));
6305 break;
6306 case 1: /* fxrstor */
6307 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6308 (s->flags & HF_EM_MASK))
6309 goto illegal_op;
6310 if (s->flags & HF_TS_MASK) {
6311 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6312 break;
6313 }
6314 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6315 gen_op_fxrstor_A0((s->dflag == 2));
6316 break;
6317 case 2: /* ldmxcsr */
6318 case 3: /* stmxcsr */
6319 if (s->flags & HF_TS_MASK) {
6320 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6321 break;
6322 }
6323 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6324 mod == 3)
6325 goto illegal_op;
6326 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6327 if (op == 2) {
6328 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6329 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6330 } else {
6331 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6332 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6333 }
6334 break;
6335 case 5: /* lfence */
6336 case 6: /* mfence */
6337 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6338 goto illegal_op;
6339 break;
6340 case 7: /* sfence / clflush */
6341 if ((modrm & 0xc7) == 0xc0) {
6342 /* sfence */
6343 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6344 if (!(s->cpuid_features & CPUID_SSE))
6345 goto illegal_op;
6346 } else {
6347 /* clflush */
6348 if (!(s->cpuid_features & CPUID_CLFLUSH))
6349 goto illegal_op;
6350 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6351 }
6352 break;
6353 default:
6354 goto illegal_op;
6355 }
6356 break;
6357 case 0x10d: /* 3DNow! prefetch(w) */
6358 modrm = ldub_code(s->pc++);
6359 mod = (modrm >> 6) & 3;
6360 if (mod == 3)
6361 goto illegal_op;
6362 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6363 /* ignore for now */
6364 break;
6365 case 0x1aa: /* rsm */
6366 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6367 break;
6368 if (!(s->flags & HF_SMM_MASK))
6369 goto illegal_op;
6370 if (s->cc_op != CC_OP_DYNAMIC) {
6371 gen_op_set_cc_op(s->cc_op);
6372 s->cc_op = CC_OP_DYNAMIC;
6373 }
6374 gen_jmp_im(s->pc - s->cs_base);
6375 gen_op_rsm();
6376 gen_eob(s);
6377 break;
6378 case 0x10e ... 0x10f:
6379 /* 3DNow! instructions, ignore prefixes */
6380 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
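/* FALL THRU */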
6381 case 0x110 ... 0x117:
6382 case 0x128 ... 0x12f:
6383 case 0x150 ... 0x177:
6384 case 0x17c ... 0x17f:
6385 case 0x1c2:
6386 case 0x1c4 ... 0x1c6:
6387 case 0x1d0 ... 0x1fe:
6388 gen_sse(s, b, pc_start, rex_r);
6389 break;
6390 default:
6391 goto illegal_op;
6392 }
6393 /* lock generation */
6394 if (s->prefix & PREFIX_LOCK)
6395 gen_op_unlock();
6396 return s->pc;
6397 illegal_op:
6398 if (s->prefix & PREFIX_LOCK)
6399 gen_op_unlock();
6400 /* XXX: ensure that no lock was generated */
6401 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6402 return s->pc;
6403 }
6404
6405 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6406 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6407
6408 /* flags read by an operation */
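/* Together with opc_write_flags below, this drives a backward
   liveness pass over the generated micro-ops: a flag that is
   written again before any op reads it is dead, and the op that
   computed it can be replaced by a simpler flag-less variant. */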
6409 static uint16_t opc_read_flags[NB_OPS] = {
6410 [INDEX_op_aas] = CC_A,
6411 [INDEX_op_aaa] = CC_A,
6412 [INDEX_op_das] = CC_A | CC_C,
6413 [INDEX_op_daa] = CC_A | CC_C,
6414
6415 /* subtle: due to the incl/decl implementation, C is used */
6416 [INDEX_op_update_inc_cc] = CC_C,
6417
6418 [INDEX_op_into] = CC_O,
6419
6420 [INDEX_op_jb_subb] = CC_C,
6421 [INDEX_op_jb_subw] = CC_C,
6422 [INDEX_op_jb_subl] = CC_C,
6423
6424 [INDEX_op_jz_subb] = CC_Z,
6425 [INDEX_op_jz_subw] = CC_Z,
6426 [INDEX_op_jz_subl] = CC_Z,
6427
6428 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6429 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6430 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6431
6432 [INDEX_op_js_subb] = CC_S,
6433 [INDEX_op_js_subw] = CC_S,
6434 [INDEX_op_js_subl] = CC_S,
6435
6436 [INDEX_op_jl_subb] = CC_O | CC_S,
6437 [INDEX_op_jl_subw] = CC_O | CC_S,
6438 [INDEX_op_jl_subl] = CC_O | CC_S,
6439
6440 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6441 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6442 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6443
6444 [INDEX_op_loopnzw] = CC_Z,
6445 [INDEX_op_loopnzl] = CC_Z,
6446 [INDEX_op_loopzw] = CC_Z,
6447 [INDEX_op_loopzl] = CC_Z,
6448
6449 [INDEX_op_seto_T0_cc] = CC_O,
6450 [INDEX_op_setb_T0_cc] = CC_C,
6451 [INDEX_op_setz_T0_cc] = CC_Z,
6452 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6453 [INDEX_op_sets_T0_cc] = CC_S,
6454 [INDEX_op_setp_T0_cc] = CC_P,
6455 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6456 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6457
6458 [INDEX_op_setb_T0_subb] = CC_C,
6459 [INDEX_op_setb_T0_subw] = CC_C,
6460 [INDEX_op_setb_T0_subl] = CC_C,
6461
6462 [INDEX_op_setz_T0_subb] = CC_Z,
6463 [INDEX_op_setz_T0_subw] = CC_Z,
6464 [INDEX_op_setz_T0_subl] = CC_Z,
6465
6466 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6467 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6468 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6469
6470 [INDEX_op_sets_T0_subb] = CC_S,
6471 [INDEX_op_sets_T0_subw] = CC_S,
6472 [INDEX_op_sets_T0_subl] = CC_S,
6473
6474 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6475 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6476 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6477
6478 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6479 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6480 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6481
6482 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6483 [INDEX_op_cmc] = CC_C,
6484 [INDEX_op_salc] = CC_C,
6485
6486 /* string-op loops jump backward, which the linear backward scan below cannot see; declaring these as reading all flags keeps the flags live across the loop */
6487 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6488 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6489 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6490 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6491
6492 #ifdef TARGET_X86_64
6493 [INDEX_op_jb_subq] = CC_C,
6494 [INDEX_op_jz_subq] = CC_Z,
6495 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6496 [INDEX_op_js_subq] = CC_S,
6497 [INDEX_op_jl_subq] = CC_O | CC_S,
6498 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6499
6500 [INDEX_op_loopnzq] = CC_Z,
6501 [INDEX_op_loopzq] = CC_Z,
6502
6503 [INDEX_op_setb_T0_subq] = CC_C,
6504 [INDEX_op_setz_T0_subq] = CC_Z,
6505 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6506 [INDEX_op_sets_T0_subq] = CC_S,
6507 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6508 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6509
6510 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6511 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6512 #endif
6513
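/* adc/sbb consume the incoming carry, and rcl/rcr rotate through it, so every memory-access flavor of these ops reads CC_C */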
6514 #define DEF_READF(SUFFIX)\
6515 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6516 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6517 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6518 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6519 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6520 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6521 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6522 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6523 \
6524 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6525 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6526 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6527 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6528 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6529 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6530 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6531 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6532
6533 DEF_READF( )
6534 DEF_READF(_raw)
6535 #ifndef CONFIG_USER_ONLY
6536 DEF_READF(_kernel)
6537 DEF_READF(_user)
6538 #endif
6539 };
6540
6541 /* flags written by an operation */
6542 static uint16_t opc_write_flags[NB_OPS] = {
6543 [INDEX_op_update2_cc] = CC_OSZAPC,
6544 [INDEX_op_update1_cc] = CC_OSZAPC,
6545 [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6546 [INDEX_op_update_neg_cc] = CC_OSZAPC,
6547 /* subtle: due to the incl/decl implementation, C is used */
6548 [INDEX_op_update_inc_cc] = CC_OSZAPC,
6549 [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6550
6551 [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6552 [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6553 [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6554 X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6555 [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6556 [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6557 [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6558 X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6559 [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6560 [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6561 X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6562
6563 /* sse */
6564 [INDEX_op_com_dummy] = CC_OSZAPC,
6568
6569 /* bcd */
6570 [INDEX_op_aam] = CC_OSZAPC,
6571 [INDEX_op_aad] = CC_OSZAPC,
6572 [INDEX_op_aas] = CC_OSZAPC,
6573 [INDEX_op_aaa] = CC_OSZAPC,
6574 [INDEX_op_das] = CC_OSZAPC,
6575 [INDEX_op_daa] = CC_OSZAPC,
6576
6577 [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6578 [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6579 [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6580 [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6581 [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6582 [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6583 [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6584 [INDEX_op_clc] = CC_C,
6585 [INDEX_op_stc] = CC_C,
6586 [INDEX_op_cmc] = CC_C,
6587
6588 [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6589 [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6590 X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6591 [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6592 [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6593 X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6594 [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6595 [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6596 X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6597 [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6598 [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6599 X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6600
6601 [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6602 [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6603 X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6604 [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6605 [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6606 X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6607
6608 [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6609 [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6610 [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6611 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6612
6613 [INDEX_op_cmpxchg8b] = CC_Z,
6614 [INDEX_op_lar] = CC_Z,
6615 [INDEX_op_lsl] = CC_Z,
6616 [INDEX_op_verr] = CC_Z,
6617 [INDEX_op_verw] = CC_Z,
6618 [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6619 [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6620
6621 #define DEF_WRITEF(SUFFIX)\
6622 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6623 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6624 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6625 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6626 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6627 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6628 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6629 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6630 \
6631 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6632 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6633 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6634 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6635 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6636 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6637 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6638 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6639 \
6640 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6641 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6642 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6643 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6644 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6645 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6646 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6647 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6648 \
6649 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6650 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6651 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6652 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6653 \
6654 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6655 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6656 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6657 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6658 \
6659 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6660 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6661 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6662 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6663 \
6664 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6665 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6666 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6667 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6668 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6669 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6670 \
6671 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6672 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6673 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6674 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6675 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6676 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6677 \
6678 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6679 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6680 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6681 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6682
6683
6684 DEF_WRITEF( )
6685 DEF_WRITEF(_raw)
6686 #ifndef CONFIG_USER_ONLY
6687 DEF_WRITEF(_kernel)
6688 DEF_WRITEF(_user)
6689 #endif
6690 };
6691
6692 /* simpler form of an operation if no flags need to be generated */
6693 static uint16_t opc_simpler[NB_OPS] = {
6694 [INDEX_op_update2_cc] = INDEX_op_nop,
6695 [INDEX_op_update1_cc] = INDEX_op_nop,
6696 [INDEX_op_update_neg_cc] = INDEX_op_nop,
6697 #if 0
6698 /* broken: CC_OP logic must be rewritten */
6699 [INDEX_op_update_inc_cc] = INDEX_op_nop,
6700 #endif
6701
6702 [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6703 [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6704 [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6705 X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6706
6707 [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6708 [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6709 [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6710 X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6711
6712 [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6713 [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6714 [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6715 X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6716
6717 #define DEF_SIMPLER(SUFFIX)\
6718 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6719 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6720 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6721 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6722 \
6723 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6724 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6725 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6726 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6727
6728 DEF_SIMPLER( )
6729 DEF_SIMPLER(_raw)
6730 #ifndef CONFIG_USER_ONLY
6731 DEF_SIMPLER(_kernel)
6732 DEF_SIMPLER(_user)
6733 #endif
6734 };
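/* entries left at zero become the identity mapping in optimize_flags_init() below */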
6735
6736 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6737 {
6738 switch(macro_id) {
6739 #ifdef MACRO_TEST
6740 case MACRO_TEST:
6741 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6742 break;
6743 #endif
6744 }
6745 }
6746
6747 void optimize_flags_init(void)
6748 {
6749 int i;
6750 /* put default values in arrays */
6751 for(i = 0; i < NB_OPS; i++) {
6752 if (opc_simpler[i] == 0)
6753 opc_simpler[i] = i;
6754 }
6755
6756 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6757
6758 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6759 #if TARGET_LONG_BITS > HOST_LONG_BITS
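/* the target registers cannot fit in single host registers here, so T0/T1/A0 are backed by fields of env instead */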
6760 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6761 TCG_AREG0, offsetof(CPUState, t0), "T0");
6762 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6763 TCG_AREG0, offsetof(CPUState, t1), "T1");
6764 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6765 TCG_AREG0, offsetof(CPUState, t2), "A0");
6766 #else
6767 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6768 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6769 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6770 cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6771 #endif
6772 /* the helpers are registered only so that debug output can print their names */
6773 TCG_HELPER(helper_divl_EAX_T0);
6774 TCG_HELPER(helper_idivl_EAX_T0);
6775 }
6776
6777 /* CPU flags computation optimization: we move backward through the
6778 generated code to see which flags are needed. An operation whose flag
6779 results are all dead is replaced with a simpler variant (see the disabled example after this function). */
6780 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6781 {
6782 uint16_t *opc_ptr;
6783 int live_flags, write_flags, op;
6784
6785 opc_ptr = opc_buf + opc_buf_len;
6786 /* live_flags contains the flags needed by the next instructions
6787 in the code. At the end of the block, we consider that all the
6788 flags are live. */
6789 live_flags = CC_OSZAPC;
6790 while (opc_ptr > opc_buf) {
6791 op = *--opc_ptr;
6792 /* if none of the flags written by the instruction is used,
6793 then we can try to find a simpler instruction */
6794 write_flags = opc_write_flags[op];
6795 if ((live_flags & write_flags) == 0) {
6796 *opc_ptr = opc_simpler[op];
6797 }
6798 /* compute the live flags before the instruction */
6799 live_flags &= ~write_flags;
6800 live_flags |= opc_read_flags[op];
6801 }
6802 }
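/* A minimal worked example (compiled out; the buffer and function name
   are hypothetical, for illustration only): walking backward from the
   end, movl_T0_eflags keeps all of CC_OSZAPC live, so the second shift
   must keep its flag updates; the first shift's flags are fully
   overwritten before any read, so optimize_flags() rewrites it to the
   flagless INDEX_op_shll_T0_T1 via opc_simpler[]. */
#if 0
static void optimize_flags_example(void)
{
    uint16_t buf[3];
    buf[0] = INDEX_op_shll_T0_T1_cc;  /* flags dead here -> simplified */
    buf[1] = INDEX_op_shll_T0_T1_cc;  /* flags read by next op -> kept */
    buf[2] = INDEX_op_movl_T0_eflags; /* reads CC_OSZAPC */
    optimize_flags(buf, 3);
}
#endif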
6803
6804 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6805 basic block 'tb'. If search_pc is TRUE, also generate PC
6806 information for each intermediate instruction. */
6807 static inline int gen_intermediate_code_internal(CPUState *env,
6808 TranslationBlock *tb,
6809 int search_pc)
6810 {
6811 DisasContext dc1, *dc = &dc1;
6812 target_ulong pc_ptr;
6813 uint16_t *gen_opc_end;
6814 int j, lj, cflags;
6815 uint64_t flags;
6816 target_ulong pc_start;
6817 target_ulong cs_base;
6818
6819 /* generate intermediate code */
6820 pc_start = tb->pc;
6821 cs_base = tb->cs_base;
6822 flags = tb->flags;
6823 cflags = tb->cflags;
6824
6825 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6826 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6827 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6828 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6829 dc->f_st = 0;
6830 dc->vm86 = (flags >> VM_SHIFT) & 1;
6831 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6832 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6833 dc->tf = (flags >> TF_SHIFT) & 1;
6834 dc->singlestep_enabled = env->singlestep_enabled;
6835 dc->cc_op = CC_OP_DYNAMIC;
6836 dc->cs_base = cs_base;
6837 dc->tb = tb;
6838 dc->popl_esp_hack = 0;
6839 /* select memory access functions */
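/* mem_index is added to the OT_* size when picking a memory op (e.g. OT_LONG + s->mem_index above): each flavor occupies four consecutive slots, one per operand size, with _raw at 0, _kernel at 1*4 and _user at 2*4 */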
6840 dc->mem_index = 0;
6841 if (flags & HF_SOFTMMU_MASK) {
6842 if (dc->cpl == 3)
6843 dc->mem_index = 2 * 4;
6844 else
6845 dc->mem_index = 1 * 4;
6846 }
6847 dc->cpuid_features = env->cpuid_features;
6848 dc->cpuid_ext_features = env->cpuid_ext_features;
6849 dc->cpuid_ext2_features = env->cpuid_ext2_features;
6850 #ifdef TARGET_X86_64
6851 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6852 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6853 #endif
6854 dc->flags = flags;
6855 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6856 (flags & HF_INHIBIT_IRQ_MASK)
6857 #ifndef CONFIG_SOFTMMU
6858 || (flags & HF_SOFTMMU_MASK)
6859 #endif
6860 );
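/* direct block chaining would bypass the per-instruction checks needed for TF, single-stepping and inhibited IRQs, so it is disabled in those cases */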
6861 #if 0
6862 /* check addseg logic */
6863 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6864 printf("ERROR addseg\n");
6865 #endif
6866
6867 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6868 #if TARGET_LONG_BITS > HOST_LONG_BITS
6869 cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
6870 #endif
6871 cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
6872 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6873 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6874
6875 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6876
6877 dc->is_jmp = DISAS_NEXT;
6878 pc_ptr = pc_start;
6879 lj = -1;
6880
6881 for(;;) {
6882 if (env->nb_breakpoints > 0) {
6883 for(j = 0; j < env->nb_breakpoints; j++) {
6884 if (env->breakpoints[j] == pc_ptr) {
6885 gen_debug(dc, pc_ptr - dc->cs_base);
6886 break;
6887 }
6888 }
6889 }
6890 if (search_pc) {
6891 j = gen_opc_ptr - gen_opc_buf;
6892 if (lj < j) {
6893 lj++;
6894 while (lj < j)
6895 gen_opc_instr_start[lj++] = 0;
6896 }
6897 gen_opc_pc[lj] = pc_ptr;
6898 gen_opc_cc_op[lj] = dc->cc_op;
6899 gen_opc_instr_start[lj] = 1;
6900 }
6901 pc_ptr = disas_insn(dc, pc_ptr);
6902 /* stop translation if indicated */
6903 if (dc->is_jmp)
6904 break;
6905 /* in single-step mode, we generate only one instruction and
6906 then raise an exception */
6907 /* if IRQs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6908 the flag and abort the translation to give the IRQs a
6909 chance to happen */
6910 if (dc->tf || dc->singlestep_enabled ||
6911 (flags & HF_INHIBIT_IRQ_MASK) ||
6912 (cflags & CF_SINGLE_INSN)) {
6913 gen_jmp_im(pc_ptr - dc->cs_base);
6914 gen_eob(dc);
6915 break;
6916 }
6917 /* stop generation as well if the translation grows too long */
6918 if (gen_opc_ptr >= gen_opc_end ||
6919 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6920 gen_jmp_im(pc_ptr - dc->cs_base);
6921 gen_eob(dc);
6922 break;
6923 }
6924 }
6925 *gen_opc_ptr = INDEX_op_end;
6926 /* mark the remaining entries as not being instruction starts */
6927 if (search_pc) {
6928 j = gen_opc_ptr - gen_opc_buf;
6929 lj++;
6930 while (lj <= j)
6931 gen_opc_instr_start[lj++] = 0;
6932 }
6933
6934 #ifdef DEBUG_DISAS
6935 if (loglevel & CPU_LOG_TB_CPU) {
6936 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6937 }
6938 if (loglevel & CPU_LOG_TB_IN_ASM) {
6939 int disas_flags;
6940 fprintf(logfile, "----------------\n");
6941 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6942 #ifdef TARGET_X86_64
6943 if (dc->code64)
6944 disas_flags = 2;
6945 else
6946 #endif
6947 disas_flags = !dc->code32;
6948 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6949 fprintf(logfile, "\n");
6950 if (loglevel & CPU_LOG_TB_OP_OPT) {
6951 fprintf(logfile, "OP before opt:\n");
6952 tcg_dump_ops(&tcg_ctx, logfile);
6953 fprintf(logfile, "\n");
6954 }
6955 }
6956 #endif
6957
6958 /* optimize flag computations */
6959 optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6960
6961 if (!search_pc)
6962 tb->size = pc_ptr - pc_start;
6963 return 0;
6964 }
6965
6966 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6967 {
6968 return gen_intermediate_code_internal(env, tb, 0);
6969 }
6970
6971 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6972 {
6973 return gen_intermediate_code_internal(env, tb, 1);
6974 }
6975
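/* re-synchronize the guest state for the instruction containing searched_pc (typically after a fault inside a TB): eip is always restored from the tables built during code generation, cc_op only when it was statically known at translation time */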
6976 void gen_pc_load(CPUState *env, TranslationBlock *tb,
6977 unsigned long searched_pc, int pc_pos, void *puc)
6978 {
6979 int cc_op;
6980 #ifdef DEBUG_DISAS
6981 if (loglevel & CPU_LOG_TB_OP) {
6982 int i;
6983 fprintf(logfile, "RESTORE:\n");
6984 for(i = 0; i <= pc_pos; i++) {
6985 if (gen_opc_instr_start[i]) {
6986 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
6987 }
6988 }
6989 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
6990 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
6991 (uint32_t)tb->cs_base);
6992 }
6993 #endif
6994 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
6995 cc_op = gen_opc_cc_op[pc_pos];
6996 if (cc_op != CC_OP_DYNAMIC)
6997 env->cc_op = cc_op;
6998 }