/*
 * Source: qemu.git, target-i386/translate.c
 * (snapshot from the "use TCG for MMX/SSE memory accesses" change)
 */
1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
/* Instruction prefix flags, OR-ed together while decoding an insn. */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
/* Helpers so that 64-bit-only table entries / expressions can be written
   once and compile away to NULL / nothing on 32-bit-only builds. */
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1
59
/* global register indexes */
/* cpu_env points at the CPUState; cpu_T[0]/cpu_T[1] are the two generic
   operand temporaries; cpu_A0 holds the computed effective address. */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1;

#ifdef TARGET_X86_64
/* non-zero when a REX prefix makes SPL/BPL/SIL/DIL addressable instead of
   the legacy AH/CH/DH/BH byte registers */
static int x86_64_hregs;
#endif
68
/* Per-translation-block decoder state.  A fresh context is set up for each
   translation block; the "current insn context" part changes per insn. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* flags seen so far */
    int aflag, dflag;  /* address / operand size (0=16, 1=32, 2=64 bit) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;  /* REX.X / REX.B bits for ModRM index/base extension */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* CPUID feature bits used to gate insn decode */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
105
/* forward declarations: end-of-block / jump emission helpers defined later */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
/* order matches the /r opcode-extension encoding of group-1 instructions */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    /* pseudo register indexes used by the decoder */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
157
/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

/* T0 = sign-extended 32-bit immediate */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T0 = zero-extended 32-bit immediate */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = sign-extended 32-bit immediate */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T1 = zero-extended 32-bit immediate */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* A0 = 32-bit immediate (address computation) */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
/* A0 = 64-bit immediate */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* T0 = full-width target immediate */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = full-width target immediate */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T0 &= 0xffff (truncate to 16-bit operand) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* A0 &= 0xffff (wrap address for 16-bit addressing modes) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
224
#ifdef TARGET_X86_64

/* number of operand sizes handled by op tables (byte/word/long/quad) */
#define NB_OP_SIZES 4

/* Expand one table entry per architectural integer register (16 in
   long mode).  Used to build the static gen_op_* dispatch tables. */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

/* 32-bit build: only the 8 legacy registers exist */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   register slot, accounting for host endianness. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
276
/* Store temporary cpu_T[t_index] into architectural register 'reg' with
   operand size 'ot'.  Sub-register writes only touch the relevant bytes,
   except OT_LONG on x86_64 which zero-extends into the high 32 bits as
   the architecture requires. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        /* regs 0..3 (or any reg when REX is in effect) are the low byte;
           otherwise 4..7 encode AH/CH/DH/BH, i.e. byte 1 of regs 0..3 */
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
309
/* reg = T0, with operand size ot */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

/* reg = T1, with operand size ot */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

/* Store A0 into register 'reg'; size is 0=word, 1=long, 2=quad (address
   sizes, so there is no byte case).  As with data stores, a 32-bit write
   zero-extends on x86_64. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
345
/* Load architectural register 'reg' into cpu_T[t_index].  Only the byte
   case needs care (AH/CH/DH/BH); every other size just loads the whole
   slot, since callers only consume the low 'ot' bytes. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;   /* low byte lives at the start of the slot */
        } else {
            /* AH/CH/DH/BH: byte 1 of regs 0..3 */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
362
/* A0 = low 32 bits of register reg (zero-extended) */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val, wrapping to 32 bits on 64-bit builds (32-bit addressing) */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val with full 64-bit arithmetic */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val, choosing 64-bit or 32-bit address arithmetic from the
   current code segment */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
392
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* env->eip = T0 (indirect jump target) */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

/* SP (16-bit stack) += val; only the low word is written back */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

/* ESP += val, truncated to 32 bits on 64-bit builds */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
/* RSP += val with full 64-bit arithmetic */
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

/* record the pending condition-code operation in env->cc_op */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
434
/* A0 += reg << shift (SIB scaled index), 32-bit address wrap on x86_64 */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

/* A0 = low 32 bits of segment base */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segment base, 32-bit address wrap on x86_64 */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* 64-bit variants: no 32-bit masking */

/* A0 = segment base */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

/* A0 += segment base */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

/* A0 = reg (full 64 bits) */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

/* A0 += reg << shift (64-bit SIB scaled index) */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
485
/* CMOVcc dispatch: [size (w/l/q)][destination register].  Byte size does
   not exist for cmov, hence NB_OP_SIZES - 1. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};

/* Expand the ADC/SBB micro-op pair for each operand size; SUFFIX selects
   the memory-access variant (_raw/_kernel/_user) or none for registers. */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

/* register-destination ADC/SBB: [size][adc=0/sbb=1] */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* memory-destination ADC/SBB: indexed by size + 4*mem_index */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* CC operation resulting from each OP_* (byte size; larger sizes add ot) */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};

/* CMPXCHG micro-op per operand size */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};

/* Shift/rotate micro-ops for each size; second index is OP_ROL..OP_SAR.
   OP_SHL1 (index 6) maps to the same op as OP_SHL. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};

/* SHLD/SHRD micro-ops; no byte form exists, so size 0 entries are NULL.
   'op' selects the count source: immediate (im) or ECX. */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
                   gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};

/* bit-test family: [size w/l/q][BT, BTS, BTR, BTC] */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* add the bit offset in T1 to the address in A0 (memory bit-test forms) */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* bit scan: [size w/l/q][BSF=0, BSR=1] */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
700
701 static inline void gen_op_lds_T0_A0(int idx)
702 {
703 int mem_index = (idx >> 2) - 1;
704 switch(idx & 3) {
705 case 0:
706 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
707 break;
708 case 1:
709 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
710 break;
711 default:
712 case 2:
713 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
714 break;
715 }
716 }
717
718 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719 static inline void gen_op_ld_T0_A0(int idx)
720 {
721 int mem_index = (idx >> 2) - 1;
722 switch(idx & 3) {
723 case 0:
724 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
725 break;
726 case 1:
727 tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
728 break;
729 case 2:
730 tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
731 break;
732 default:
733 case 3:
734 tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
735 break;
736 }
737 }
738
739 static inline void gen_op_ldu_T0_A0(int idx)
740 {
741 gen_op_ld_T0_A0(idx);
742 }
743
/* Zero-extending load of T1 from the address in A0 (same idx encoding as
   gen_op_ld_T0_A0). */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
763
764 static inline void gen_op_st_T0_A0(int idx)
765 {
766 int mem_index = (idx >> 2) - 1;
767 switch(idx & 3) {
768 case 0:
769 tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
770 break;
771 case 1:
772 tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
773 break;
774 case 2:
775 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
776 break;
777 default:
778 case 3:
779 tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
780 break;
781 }
782 }
783
/* Store T1 to the address in A0 (same idx encoding as gen_op_st_T0_A0). */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
803
/* set env->eip to a known constant pc (used before helpers that may fault
   or before ending the TB) */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
809
/* Compute the source address of a string instruction into A0:
   seg_base + (E/R)SI, honoring segment overrides and the current
   address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base ignored unless overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
844
/* Compute the destination address of a string instruction into A0:
   always ES:(E/R)DI — the destination segment cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        /* 32-bit: ES base only needed when some segment base is non-zero */
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16-bit addressing: DI wrapped to 16 bits plus ES base */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
865
/* per-size string increment (+-1/2/4/8 depending on DF), loaded into T0 */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* conditional jumps on (E/R)CX, indexed by address size */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* decrement (E/R)CX, indexed by address size */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* repz/repnz loop exit tests: [nz][operand size] */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* port I/O micro-ops, indexed by operand size (b/w/l) */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* TSS I/O permission bitmap checks, port in T0 or in DX */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
941
942 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
943 {
944 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945 if (s->cc_op != CC_OP_DYNAMIC)
946 gen_op_set_cc_op(s->cc_op);
947 gen_jmp_im(cur_eip);
948 if (use_dx)
949 gen_check_io_DX[ot]();
950 else
951 gen_check_io_T0[ot]();
952 }
953 }
954
/* MOVS: copy one element [DS:SI] -> [ES:DI], then advance SI and DI by
   the direction-flag-dependent increment (Dshift). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
976
977 static inline void gen_update_cc_op(DisasContext *s)
978 {
979 if (s->cc_op != CC_OP_DYNAMIC) {
980 gen_op_set_cc_op(s->cc_op);
981 s->cc_op = CC_OP_DYNAMIC;
982 }
983 }
984
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the "skip if (E/R)CX == 0" prologue of a rep-string insn: jump to
   the code after the loop when CX is zero.  Returns the label (l2) that
   loop bodies jump back to when the repeat must stop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* CX != 0: run the string op */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);     /* CX == 0: fall through to next insn */
    gen_set_label(l1);
    return l2;
}
999
/* STOS: store AL/AX/EAX/RAX to [ES:DI], then advance DI by Dshift. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* LODS: load [DS:SI] into AL/AX/EAX/RAX, then advance SI by Dshift. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1035
/* SCAS: compare accumulator with [ES:DI] (sets flags via the cmp
   micro-op), then advance DI by Dshift. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* CMPS: compare [DS:SI] with [ES:DI], then advance both SI and DI by
   Dshift. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1077
/* INS: read from port DX into [ES:DI], then advance DI by Dshift. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    /* NOTE(review): a dummy store of 0 is emitted before the port read —
       presumably so a write fault on [ES:DI] is taken before the device
       I/O is performed; confirm against the op helpers. */
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* OUTS: write [DS:SI] to port DX, then advance SI by Dshift. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1115
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ builds the rep-prefixed version of a string op: test CX,
   run one iteration, decrement CX, and loop back by jumping to the
   current instruction (or chain to the next when CX hits zero). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* GEN_REPZ2: like GEN_REPZ but for SCAS/CMPS, which also terminate on
   the ZF condition selected by nz (repz vs repnz). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1159
1160 enum {
1161 JCC_O,
1162 JCC_B,
1163 JCC_Z,
1164 JCC_BE,
1165 JCC_S,
1166 JCC_P,
1167 JCC_L,
1168 JCC_LE,
1169 };
1170
1171 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1172 [OT_BYTE] = {
1173 NULL,
1174 gen_op_jb_subb,
1175 gen_op_jz_subb,
1176 gen_op_jbe_subb,
1177 gen_op_js_subb,
1178 NULL,
1179 gen_op_jl_subb,
1180 gen_op_jle_subb,
1181 },
1182 [OT_WORD] = {
1183 NULL,
1184 gen_op_jb_subw,
1185 gen_op_jz_subw,
1186 gen_op_jbe_subw,
1187 gen_op_js_subw,
1188 NULL,
1189 gen_op_jl_subw,
1190 gen_op_jle_subw,
1191 },
1192 [OT_LONG] = {
1193 NULL,
1194 gen_op_jb_subl,
1195 gen_op_jz_subl,
1196 gen_op_jbe_subl,
1197 gen_op_js_subl,
1198 NULL,
1199 gen_op_jl_subl,
1200 gen_op_jle_subl,
1201 },
1202 #ifdef TARGET_X86_64
1203 [OT_QUAD] = {
1204 NULL,
1205 BUGGY_64(gen_op_jb_subq),
1206 gen_op_jz_subq,
1207 BUGGY_64(gen_op_jbe_subq),
1208 gen_op_js_subq,
1209 NULL,
1210 BUGGY_64(gen_op_jl_subq),
1211 BUGGY_64(gen_op_jle_subq),
1212 },
1213 #endif
1214 };
1215 static GenOpFunc1 *gen_op_loop[3][4] = {
1216 [0] = {
1217 gen_op_loopnzw,
1218 gen_op_loopzw,
1219 gen_op_jnz_ecxw,
1220 },
1221 [1] = {
1222 gen_op_loopnzl,
1223 gen_op_loopzl,
1224 gen_op_jnz_ecxl,
1225 },
1226 #ifdef TARGET_X86_64
1227 [2] = {
1228 gen_op_loopnzq,
1229 gen_op_loopzq,
1230 gen_op_jnz_ecxq,
1231 },
1232 #endif
1233 };
1234
1235 static GenOpFunc *gen_setcc_slow[8] = {
1236 gen_op_seto_T0_cc,
1237 gen_op_setb_T0_cc,
1238 gen_op_setz_T0_cc,
1239 gen_op_setbe_T0_cc,
1240 gen_op_sets_T0_cc,
1241 gen_op_setp_T0_cc,
1242 gen_op_setl_T0_cc,
1243 gen_op_setle_T0_cc,
1244 };
1245
1246 static GenOpFunc *gen_setcc_sub[4][8] = {
1247 [OT_BYTE] = {
1248 NULL,
1249 gen_op_setb_T0_subb,
1250 gen_op_setz_T0_subb,
1251 gen_op_setbe_T0_subb,
1252 gen_op_sets_T0_subb,
1253 NULL,
1254 gen_op_setl_T0_subb,
1255 gen_op_setle_T0_subb,
1256 },
1257 [OT_WORD] = {
1258 NULL,
1259 gen_op_setb_T0_subw,
1260 gen_op_setz_T0_subw,
1261 gen_op_setbe_T0_subw,
1262 gen_op_sets_T0_subw,
1263 NULL,
1264 gen_op_setl_T0_subw,
1265 gen_op_setle_T0_subw,
1266 },
1267 [OT_LONG] = {
1268 NULL,
1269 gen_op_setb_T0_subl,
1270 gen_op_setz_T0_subl,
1271 gen_op_setbe_T0_subl,
1272 gen_op_sets_T0_subl,
1273 NULL,
1274 gen_op_setl_T0_subl,
1275 gen_op_setle_T0_subl,
1276 },
1277 #ifdef TARGET_X86_64
1278 [OT_QUAD] = {
1279 NULL,
1280 gen_op_setb_T0_subq,
1281 gen_op_setz_T0_subq,
1282 gen_op_setbe_T0_subq,
1283 gen_op_sets_T0_subq,
1284 NULL,
1285 gen_op_setl_T0_subq,
1286 gen_op_setle_T0_subq,
1287 },
1288 #endif
1289 };
1290
/* x87 "reg" form arithmetic ST0 = ST0 op FT0, indexed by the 3-bit op
   field of the FPU opcode (FADD/FMUL/FCOM/FCOMP/FSUB/FSUBR/FDIV/FDIVR).
   Slots 2 and 3 both use FCOM; the extra pop of FCOMP is emitted by the
   decoder, not here. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1301
/* x87 arithmetic ST(n) = ST(n) op ST0.  NOTE the exception in "r" op
   ordering: for the STN destination forms the x86 encoding swaps the
   SUB/SUBR and DIV/DIVR slots relative to the ST0 table above. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1313
/* Emit code for a two-operand ALU operation 'op' of size 'ot' with T1 as
   the source.  If d == OR_TMP0, the destination is the memory operand
   whose address is already in A0; otherwise d is a register index.
   Updates s1->cc_op to reflect the flags state left pending. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the carry flag, so the current flags must be
           materialized first; the helpers also perform the write-back
           themselves (register or locked memory form). */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP only sets flags; no result write-back, no cc update func */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1381
/* Emit code for INC (c > 0) or DEC (c <= 0) of size 'ot'.  If
   d == OR_TMP0, the operand is the memory location addressed by A0.
   INC/DEC preserve CF, so the previous flags state is materialized
   before the operation and a dedicated cc_op is recorded. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* CF must survive: flush pending flags before clobbering cc state */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_op_update_inc_cc();
}
1404
/* Emit code for a shift/rotate 'op' of size 'ot' on destination d
   (OR_TMP0 = memory at A0) by the count in register s (OR_TMP1 = count
   is already in T1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        /* memory form writes the result back itself */
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1425
/* Shift/rotate with an immediate count 'c': load the count into T1 and
   reuse the generic variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1432
/* Decode the memory operand of a ModRM byte and emit code computing the
   effective address into A0 (including any segment base).  Consumes the
   SIB byte and displacement from the instruction stream (advancing
   s->pc).  On return *reg_ptr is OR_A0 and *offset_ptr is 0, since the
   displacement has already been folded into A0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;   /* explicit segment prefix forces the add */
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* mod=0, base=5: no base register, disp32 only;
                   in 64 bit mode without SIB this is RIP-relative */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP based, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16 bit addressing: fixed base/index register combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* mod=0, rm=6: disp16 only, no register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();   /* 16 bit wrap-around of the offset */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based modes (rm 2, 3, 6) default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1616
/* Skip over the memory operand of a ModRM byte without generating any
   code: advance s->pc past the SIB byte and displacement.  Used for
   multi-byte NOP/hint encodings whose operand is ignored. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)   /* register operand: nothing to skip */
        return;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        base = rm;

        if (base == 4) {
            /* SIB byte present */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;   /* disp32, no base */
            }
            break;
        case 1:
            s->pc++;          /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4;       /* disp32 */
            break;
        }
    } else {
        /* 16 bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;   /* disp16, no base */
            }
            break;
        case 1:
            s->pc++;          /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2;       /* disp16 */
            break;
        }
    }
}
1666
1667 /* used for LEA and MOV AX, mem */
1668 static void gen_add_A0_ds_seg(DisasContext *s)
1669 {
1670 int override, must_add_seg;
1671 must_add_seg = s->addseg;
1672 override = R_DS;
1673 if (s->override >= 0) {
1674 override = s->override;
1675 must_add_seg = 1;
1676 } else {
1677 override = R_DS;
1678 }
1679 if (must_add_seg) {
1680 #ifdef TARGET_X86_64
1681 if (CODE64(s)) {
1682 gen_op_addq_A0_seg(override);
1683 } else
1684 #endif
1685 {
1686 gen_op_addl_A0_seg(override);
1687 }
1688 }
1689 }
1690
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0.  When the ModRM specifies a register (mod == 3) the transfer
   is register-to-register; otherwise the effective address is computed
   and the value is loaded from / stored to memory. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register operand */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: compute address into A0 first */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1722
1723 static inline uint32_t insn_get(DisasContext *s, int ot)
1724 {
1725 uint32_t ret;
1726
1727 switch(ot) {
1728 case OT_BYTE:
1729 ret = ldub_code(s->pc);
1730 s->pc++;
1731 break;
1732 case OT_WORD:
1733 ret = lduw_code(s->pc);
1734 s->pc += 2;
1735 break;
1736 default:
1737 case OT_LONG:
1738 ret = ldl_code(s->pc);
1739 s->pc += 4;
1740 break;
1741 }
1742 return ret;
1743 }
1744
1745 static inline int insn_const_size(unsigned int ot)
1746 {
1747 if (ot <= OT_LONG)
1748 return 1 << ot;
1749 else
1750 return 4;
1751 }
1752
/* Emit a jump to 'eip'.  If the destination lies in the same guest page
   as the current TB (or the page of the last decoded byte), a direct
   chained TB jump is generated; otherwise fall back to an end-of-block
   with an indirect lookup. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1773
/* Emit a conditional jump for Jcc opcode bits 'b': taken target is
   'val', fall-through is 'next_eip'.  bit 0 of b inverts the condition;
   bits 1..3 select jcc_op.  When TB chaining is allowed (s->jmp_opt) a
   fast condition test specialized on the pending cc_op is used where
   possible; otherwise the condition is computed the slow way and the
   block is ended. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S can be read directly from the
               stored result; (cc_op - CC_OP_ADDB) % 4 recovers the
               operand size to index the sub-based jump table */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast path: compute the condition into T0 and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* inverted condition: swap taken/fall-through targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no TB chaining: compute condition slowly and end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1902
/* Emit code computing the SETcc condition 'b' into T0 (0 or 1).  Uses a
   cc_op-specialized fast path where available, otherwise the slow
   eflags-based computation.  Bit 0 of b inverts the final result. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read straight from the stored result;
           (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize flags first, then use the generic computation */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1969
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS.  In protected mode the
   helper performs the descriptor load (and can fault, hence the eip
   update); in real/vm86 mode the selector is written directly. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1992
/* load a 64 bit immediate into T1 (used for SVM intercept parameters) */
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1994
/* Emit an SVM IOIO intercept check before an I/O instruction, if the
   guest has IOIO interception enabled.  'type' packs the intercept info
   (split into two 32 bit halves for the helper).  Compiled out for
   user-mode emulation.  Always returns 0 (no end-of-block). */
static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        /* T1 = next eip so the helper can resume after the insn */
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now. */
    }
#endif
    return 0;
}
2014
2015 static inline int svm_is_rep(int prefixes)
2016 {
2017 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2018 }
2019
/* Emit an SVM intercept check of kind 'type' with exit parameter
   'param'.  Returns 1 when the check ends the translation block
   (unconditional vmexit path), 0 otherwise.  Does nothing when SVM is
   not active for this TB. */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
	/* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
    case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        SVM_movq_T1_im(param);
        gen_op_geneflags();
        gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
        /* this is a special case as we do not know if the interception occurs
           so we assume there was none */
        return 0;
    case SVM_EXIT_MSR:
        if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        }
        break;
    default:
        if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                /* NOTE(review): this branch records CC_OP_EFLAGS while
                   the branches above record CC_OP_DYNAMIC — looks
                   intentional since gen_op_geneflags materializes the
                   flags, but worth confirming */
                s->cc_op = CC_OP_EFLAGS;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_vmexit(type >> 32, type);
            /* we can optimize this one so TBs don't get longer
               than up to vmexit */
            gen_eob(s);
            return 1;
        }
    }
    return 0;
}
2074
/* Convenience wrapper: SVM intercept check with a zero exit parameter */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2080
/* Add 'addend' to the stack pointer, using the width implied by the
   code/stack segment mode (RSP in 64 bit, ESP if ss32, SP otherwise). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2094
/* generate a push of T0. It depends on ss32, addseg and dflag.  The
   stack pointer update is ordered after the store so that a faulting
   store leaves ESP unchanged (precise exceptions). */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16 bit operand size in 64 bit mode */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the un-based offset in T1 for the final ESP update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2134
2135 /* generate a push. It depends on ss32, addseg and dflag */
2136 /* slower version for T1, only used for call Ev */
2137 static void gen_push_T1(DisasContext *s)
2138 {
2139 #ifdef TARGET_X86_64
2140 if (CODE64(s)) {
2141 gen_op_movq_A0_reg(R_ESP);
2142 if (s->dflag) {
2143 gen_op_addq_A0_im(-8);
2144 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2145 } else {
2146 gen_op_addq_A0_im(-2);
2147 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2148 }
2149 gen_op_mov_reg_A0(2, R_ESP);
2150 } else
2151 #endif
2152 {
2153 gen_op_movl_A0_reg(R_ESP);
2154 if (!s->dflag)
2155 gen_op_addl_A0_im(-2);
2156 else
2157 gen_op_addl_A0_im(-4);
2158 if (s->ss32) {
2159 if (s->addseg) {
2160 gen_op_addl_A0_seg(R_SS);
2161 }
2162 } else {
2163 gen_op_andl_A0_ffff();
2164 gen_op_addl_A0_seg(R_SS);
2165 }
2166 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2167
2168 if (s->ss32 && !s->addseg)
2169 gen_op_mov_reg_A0(1, R_ESP);
2170 else
2171 gen_stack_update(s, (-2) << s->dflag);
2172 }
2173 }
2174
/* two step pop is necessary for precise exceptions: this loads the top
   of stack into T0 but leaves ESP untouched; gen_pop_update performs
   the pointer adjustment afterwards. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2196
/* Second half of a pop: advance the stack pointer by the popped operand
   size (8 in 64 bit mode with default operand size, else 2 or 4). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2208
/* Compute the current top-of-stack address into A0 (with SS base if
   needed); the raw, un-based offset is also left in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2218
/* PUSHA: push all eight general registers (EAX..EDI, original ESP
   value included).  NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);   /* room for 8 regs */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();                   /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* registers are pushed in order EAX..EDI, i.e. stored bottom-up */
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2237
/* POPA: pop all general registers, skipping the stored ESP value.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);    /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2259
/* ENTER: allocate a stack frame of esp_addend bytes with 'level'
   nesting levels (level is masked to 5 bits per the ISA).  Pushes the
   old frame pointer, optionally copies outer frame pointers via the
   enter-level helper, then sets EBP and adjusts ESP. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2307
/* Raise CPU exception 'trapno' at guest address cur_eip: flags are
   materialized and eip updated first so the exception state is precise.
   Ends the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2316
/* an interrupt is different from an exception because of the
   privilege checks; next_eip - cur_eip gives the helper the
   instruction length needed to compute the return address */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2328
/* Emit a debug trap at cur_eip (breakpoint handling); ends the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2337
/* generate a generic end of block. Trace exception is also generated
   if needed.  Flushes pending flags, clears the interrupt-inhibit
   state, then exits via debug trap, single step, or a plain TB exit. */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2356
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur.  With TB chaining enabled a
   chained goto_tb is used; otherwise a plain eip update + end of block. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2373
/* Convenience wrapper: jump to eip using TB slot 0 */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2378
/* Load a 64 bit value from guest memory at A0 into the CPUState field
   at 'offset'.  'idx' is an ot + mem_index value; (idx >> 2) - 1
   recovers the raw mem_index for the qemu_ld op. */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
}
2385
/* Store the 64 bit CPUState field at 'offset' to guest memory at A0.
   See gen_ldq_env_A0 for the idx encoding. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
}
2392
/* Load a 128 bit (octword/XMM) value from guest memory at A0 into the
   XMMReg at 'offset' in CPUState, as two 64 bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2402
/* Store the 128 bit (octword/XMM) XMMReg at 'offset' in CPUState to
   guest memory at A0, as two 64 bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
}
2412
/* sentinel values used in the SSE op tables instead of real generator
   functions: SSE_SPECIAL = insn needs hand-written decode code,
   SSE_DUMMY = insn is accepted but generates nothing generic (femms,
   emms, 3DNow! dispatch) */
#define SSE_SPECIAL ((GenOpFunc2 *)1)
#define SSE_DUMMY ((GenOpFunc2 *)2)

/* build the { mmx, xmm } resp. { ps, pd, ss, sd } generator variants
   for one table entry */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2419
/* Dispatch table for 0x0f-prefixed SSE/MMX opcodes, indexed by
   [second opcode byte][prefix variant].  The four columns select the
   encoding variant: 0 = no prefix (MMX / ps), 1 = 0x66 (SSE2 / pd),
   2 = 0xf3 (ss), 3 = 0xf2 (sd).  NULL = invalid encoding; SSE_SPECIAL
   and SSE_DUMMY entries are handled by dedicated decoder code. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2545
/* Dispatch table for the MMX/SSE shift-by-immediate groups (opcodes
   0x71/0x72/0x73, grp12/grp13/grp14).  Row index is
   8 * (opcode - 0x71) + modrm.reg, i.e. the first 8 rows are the word
   shifts, the next 8 the dword shifts, the last 8 the qword shifts.
   Column 0 is the MMX form, column 1 the SSE (0x66-prefixed) form.
   Entries left NULL (including the whole b1 >= 2 space, which is not
   indexed here) are invalid encodings and raise #UD in the caller. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },    /* psrldq: SSE2 only, no MMX form */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },    /* pslldq: SSE2 only, no MMX form */
};
2558
/* Scalar int<->float conversion ops.  Laid out as three groups of four:
   [0..3]  cvtsi2ss/sd  (0x2a: integer -> scalar float)
   [4..7]  cvttss/sd2si (0x2c: truncating float -> integer)
   [8..11] cvtss/sd2si  (0x2d: rounding float -> integer)
   Within each group the index is (dflag == 2) * 2 + (single=0 / double=1),
   so the 64-bit-integer variants occupy the odd pair; on 32-bit targets
   X86_64_ONLY() turns those entries into NULL. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2575
/* CMPPS/CMPPD/CMPSS/CMPSD (opcode 0xc2) predicate table, indexed by the
   3-bit immediate predicate (0-7); each SSE_FOP row expands to the four
   prefix variants (ps, pd, ss, sd). */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2586
/* 3DNow! operations, indexed by the instruction's trailing suffix byte
   (the immediate that follows the modrm of opcode 0x0f 0x0f).  NULL
   entries are undefined suffixes and raise #UD in gen_sse(). */
static GenOpFunc2 *sse_op_table5[256] = {
    [0x0c] = gen_op_pi2fw,
    [0x0d] = gen_op_pi2fd,
    [0x1c] = gen_op_pf2iw,
    [0x1d] = gen_op_pf2id,
    [0x8a] = gen_op_pfnacc,
    [0x8e] = gen_op_pfpnacc,
    [0x90] = gen_op_pfcmpge,
    [0x94] = gen_op_pfmin,
    [0x96] = gen_op_pfrcp,
    [0x97] = gen_op_pfrsqrt,
    [0x9a] = gen_op_pfsub,
    [0x9e] = gen_op_pfadd,
    [0xa0] = gen_op_pfcmpgt,
    [0xa4] = gen_op_pfmax,
    [0xa6] = gen_op_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_op_movq, /* pfrsqit1 */
    [0xaa] = gen_op_pfsubr,
    [0xae] = gen_op_pfacc,
    [0xb0] = gen_op_pfcmpeq,
    [0xb4] = gen_op_pfmul,
    [0xb6] = gen_op_movq, /* pfrcpit2 */
    [0xb7] = gen_op_pmulhrw_mmx,
    [0xbb] = gen_op_pswapd,
    [0xbf] = gen_op_pavgb_mmx /* pavgusb */
};
2613
/* Translate one MMX/SSE/SSE2/SSE3/3DNow! instruction.
 *
 * 'b' is the opcode byte (already past the 0x0f escape for two-byte
 * opcodes), 'pc_start' the guest address of the instruction (used for
 * exception reporting), and 'rex_r' the pre-shifted REX.R extension of
 * the modrm reg field.
 *
 * Dispatch is through sse_op_table1[b][b1], where b1 encodes the
 * mandatory prefix (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2).  Table
 * entries that are real GenOpFunc2 pointers are handled by the generic
 * path at the bottom; SSE_SPECIAL entries (moves, shift-by-immediate,
 * conversions, insert/extract...) are decoded case by case in the big
 * switch, with the prefix folded into the opcode as (b1 << 8) | b. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* select the table column from the mandatory prefix */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* 0x10-0x5f, 0xc2 and 0xc6 operate on XMM regardless of prefix;
       elsewhere only the prefixed forms are SSE, the bare form is MMX */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: #NM so the OS can lazily restore the FPU state */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE requires CR4.OSFXSR; MMX does not */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        gen_op_emms();
        return;
    }
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;    /* REX.R only extends XMM register numbers */
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);    /* fold the prefix into the case label */
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                /* register form only moves the low dword */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* duplicate the low qword into the high qword */
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* movq always zero-extends into the high qword */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* store forms have no register encoding */
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* the immediate shift count is staged in xmm_t0/mmx_t0 so
               the table op sees a uniform (dest, count-operand) pair */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* source is an MMX operand, so MMX state must be live */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            /* destination is an MMX register */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd form: 64-bit source */
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    /* ss form: 32-bit source */
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, +(b & 1)*4 the rounding one */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            /* the byte immediate follows the modrm displacement */
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            /* destination is the general register in the reg field */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* trailing immediate byte shifts rip-relative addressing */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            sse_op2(op1_offset, op2_offset);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* ucomis/comis write EFLAGS directly */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3227
3228
3229 /* convert one instruction. s->is_jmp is set if the translation must
3230 be stopped. Return the next pc value */
3231 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3232 {
3233 int b, prefixes, aflag, dflag;
3234 int shift, ot;
3235 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3236 target_ulong next_eip, tval;
3237 int rex_w, rex_r;
3238
3239 s->pc = pc_start;
3240 prefixes = 0;
3241 aflag = s->code32;
3242 dflag = s->code32;
3243 s->override = -1;
3244 rex_w = -1;
3245 rex_r = 0;
3246 #ifdef TARGET_X86_64
3247 s->rex_x = 0;
3248 s->rex_b = 0;
3249 x86_64_hregs = 0;
3250 #endif
3251 s->rip_offset = 0; /* for relative ip address */
3252 next_byte:
3253 b = ldub_code(s->pc);
3254 s->pc++;
3255 /* check prefixes */
3256 #ifdef TARGET_X86_64
3257 if (CODE64(s)) {
3258 switch (b) {
3259 case 0xf3:
3260 prefixes |= PREFIX_REPZ;
3261 goto next_byte;
3262 case 0xf2:
3263 prefixes |= PREFIX_REPNZ;
3264 goto next_byte;
3265 case 0xf0:
3266 prefixes |= PREFIX_LOCK;
3267 goto next_byte;
3268 case 0x2e:
3269 s->override = R_CS;
3270 goto next_byte;
3271 case 0x36:
3272 s->override = R_SS;
3273 goto next_byte;
3274 case 0x3e:
3275 s->override = R_DS;
3276 goto next_byte;
3277 case 0x26:
3278 s->override = R_ES;
3279 goto next_byte;
3280 case 0x64:
3281 s->override = R_FS;
3282 goto next_byte;
3283 case 0x65:
3284 s->override = R_GS;
3285 goto next_byte;
3286 case 0x66:
3287 prefixes |= PREFIX_DATA;
3288 goto next_byte;
3289 case 0x67:
3290 prefixes |= PREFIX_ADR;
3291 goto next_byte;
3292 case 0x40 ... 0x4f:
3293 /* REX prefix */
3294 rex_w = (b >> 3) & 1;
3295 rex_r = (b & 0x4) << 1;
3296 s->rex_x = (b & 0x2) << 2;
3297 REX_B(s) = (b & 0x1) << 3;
3298 x86_64_hregs = 1; /* select uniform byte register addressing */
3299 goto next_byte;
3300 }
3301 if (rex_w == 1) {
3302 /* 0x66 is ignored if rex.w is set */
3303 dflag = 2;
3304 } else {
3305 if (prefixes & PREFIX_DATA)
3306 dflag ^= 1;
3307 }
3308 if (!(prefixes & PREFIX_ADR))
3309 aflag = 2;
3310 } else
3311 #endif
3312 {
3313 switch (b) {
3314 case 0xf3:
3315 prefixes |= PREFIX_REPZ;
3316 goto next_byte;
3317 case 0xf2:
3318 prefixes |= PREFIX_REPNZ;
3319 goto next_byte;
3320 case 0xf0:
3321 prefixes |= PREFIX_LOCK;
3322 goto next_byte;
3323 case 0x2e:
3324 s->override = R_CS;
3325 goto next_byte;
3326 case 0x36:
3327 s->override = R_SS;
3328 goto next_byte;
3329 case 0x3e:
3330 s->override = R_DS;
3331 goto next_byte;
3332 case 0x26:
3333 s->override = R_ES;
3334 goto next_byte;
3335 case 0x64:
3336 s->override = R_FS;
3337 goto next_byte;
3338 case 0x65:
3339 s->override = R_GS;
3340 goto next_byte;
3341 case 0x66:
3342 prefixes |= PREFIX_DATA;
3343 goto next_byte;
3344 case 0x67:
3345 prefixes |= PREFIX_ADR;
3346 goto next_byte;
3347 }
3348 if (prefixes & PREFIX_DATA)
3349 dflag ^= 1;
3350 if (prefixes & PREFIX_ADR)
3351 aflag ^= 1;
3352 }
3353
3354 s->prefix = prefixes;
3355 s->aflag = aflag;
3356 s->dflag = dflag;
3357
3358 /* lock generation */
3359 if (prefixes & PREFIX_LOCK)
3360 gen_op_lock();
3361
3362 /* now check op code */
3363 reswitch:
3364 switch(b) {
3365 case 0x0f:
3366 /**************************/
3367 /* extended op code */
3368 b = ldub_code(s->pc++) | 0x100;
3369 goto reswitch;
3370
3371 /**************************/
3372 /* arith & logic */
3373 case 0x00 ... 0x05:
3374 case 0x08 ... 0x0d:
3375 case 0x10 ... 0x15:
3376 case 0x18 ... 0x1d:
3377 case 0x20 ... 0x25:
3378 case 0x28 ... 0x2d:
3379 case 0x30 ... 0x35:
3380 case 0x38 ... 0x3d:
3381 {
3382 int op, f, val;
3383 op = (b >> 3) & 7;
3384 f = (b >> 1) & 3;
3385
3386 if ((b & 1) == 0)
3387 ot = OT_BYTE;
3388 else
3389 ot = dflag + OT_WORD;
3390
3391 switch(f) {
3392 case 0: /* OP Ev, Gv */
3393 modrm = ldub_code(s->pc++);
3394 reg = ((modrm >> 3) & 7) | rex_r;
3395 mod = (modrm >> 6) & 3;
3396 rm = (modrm & 7) | REX_B(s);
3397 if (mod != 3) {
3398 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3399 opreg = OR_TMP0;
3400 } else if (op == OP_XORL && rm == reg) {
3401 xor_zero:
3402 /* xor reg, reg optimisation */
3403 gen_op_movl_T0_0();
3404 s->cc_op = CC_OP_LOGICB + ot;
3405 gen_op_mov_reg_T0(ot, reg);
3406 gen_op_update1_cc();
3407 break;
3408 } else {
3409 opreg = rm;
3410 }
3411 gen_op_mov_TN_reg(ot, 1, reg);
3412 gen_op(s, op, ot, opreg);
3413 break;
3414 case 1: /* OP Gv, Ev */
3415 modrm = ldub_code(s->pc++);
3416 mod = (modrm >> 6) & 3;
3417 reg = ((modrm >> 3) & 7) | rex_r;
3418 rm = (modrm & 7) | REX_B(s);
3419 if (mod != 3) {
3420 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3421 gen_op_ld_T1_A0(ot + s->mem_index);
3422 } else if (op == OP_XORL && rm == reg) {
3423 goto xor_zero;
3424 } else {
3425 gen_op_mov_TN_reg(ot, 1, rm);
3426 }
3427 gen_op(s, op, ot, reg);
3428 break;
3429 case 2: /* OP A, Iv */
3430 val = insn_get(s, ot);
3431 gen_op_movl_T1_im(val);
3432 gen_op(s, op, ot, OR_EAX);
3433 break;
3434 }
3435 }
3436 break;
3437
3438 case 0x80: /* GRP1 */
3439 case 0x81:
3440 case 0x82:
3441 case 0x83:
3442 {
3443 int val;
3444
3445 if ((b & 1) == 0)
3446 ot = OT_BYTE;
3447 else
3448 ot = dflag + OT_WORD;
3449
3450 modrm = ldub_code(s->pc++);
3451 mod = (modrm >> 6) & 3;
3452 rm = (modrm & 7) | REX_B(s);
3453 op = (modrm >> 3) & 7;
3454
3455 if (mod != 3) {
3456 if (b == 0x83)
3457 s->rip_offset = 1;
3458 else
3459 s->rip_offset = insn_const_size(ot);
3460 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3461 opreg = OR_TMP0;
3462 } else {
3463 opreg = rm;
3464 }
3465
3466 switch(b) {
3467 default:
3468 case 0x80:
3469 case 0x81:
3470 case 0x82:
3471 val = insn_get(s, ot);
3472 break;
3473 case 0x83:
3474 val = (int8_t)insn_get(s, OT_BYTE);
3475 break;
3476 }
3477 gen_op_movl_T1_im(val);
3478 gen_op(s, op, ot, opreg);
3479 }
3480 break;
3481
3482 /**************************/
3483 /* inc, dec, and other misc arith */
3484 case 0x40 ... 0x47: /* inc Gv */
3485 ot = dflag ? OT_LONG : OT_WORD;
3486 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3487 break;
3488 case 0x48 ... 0x4f: /* dec Gv */
3489 ot = dflag ? OT_LONG : OT_WORD;
3490 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3491 break;
3492 case 0xf6: /* GRP3 */
3493 case 0xf7:
3494 if ((b & 1) == 0)
3495 ot = OT_BYTE;
3496 else
3497 ot = dflag + OT_WORD;
3498
3499 modrm = ldub_code(s->pc++);
3500 mod = (modrm >> 6) & 3;
3501 rm = (modrm & 7) | REX_B(s);
3502 op = (modrm >> 3) & 7;
3503 if (mod != 3) {
3504 if (op == 0)
3505 s->rip_offset = insn_const_size(ot);
3506 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3507 gen_op_ld_T0_A0(ot + s->mem_index);
3508 } else {
3509 gen_op_mov_TN_reg(ot, 0, rm);
3510 }
3511
3512 switch(op) {
3513 case 0: /* test */
3514 val = insn_get(s, ot);
3515 gen_op_movl_T1_im(val);
3516 gen_op_testl_T0_T1_cc();
3517 s->cc_op = CC_OP_LOGICB + ot;
3518 break;
3519 case 2: /* not */
3520 gen_op_notl_T0();
3521 if (mod != 3) {
3522 gen_op_st_T0_A0(ot + s->mem_index);
3523 } else {
3524 gen_op_mov_reg_T0(ot, rm);
3525 }
3526 break;
3527 case 3: /* neg */
3528 gen_op_negl_T0();
3529 if (mod != 3) {
3530 gen_op_st_T0_A0(ot + s->mem_index);
3531 } else {
3532 gen_op_mov_reg_T0(ot, rm);
3533 }
3534 gen_op_update_neg_cc();
3535 s->cc_op = CC_OP_SUBB + ot;
3536 break;
3537 case 4: /* mul */
3538 switch(ot) {
3539 case OT_BYTE:
3540 gen_op_mulb_AL_T0();
3541 s->cc_op = CC_OP_MULB;
3542 break;
3543 case OT_WORD:
3544 gen_op_mulw_AX_T0();
3545 s->cc_op = CC_OP_MULW;
3546 break;
3547 default:
3548 case OT_LONG:
3549 gen_op_mull_EAX_T0();
3550 s->cc_op = CC_OP_MULL;
3551 break;
3552 #ifdef TARGET_X86_64
3553 case OT_QUAD:
3554 gen_op_mulq_EAX_T0();
3555 s->cc_op = CC_OP_MULQ;
3556 break;
3557 #endif
3558 }
3559 break;
3560 case 5: /* imul */
3561 switch(ot) {
3562 case OT_BYTE:
3563 gen_op_imulb_AL_T0();
3564 s->cc_op = CC_OP_MULB;
3565 break;
3566 case OT_WORD:
3567 gen_op_imulw_AX_T0();
3568 s->cc_op = CC_OP_MULW;
3569 break;
3570 default:
3571 case OT_LONG:
3572 gen_op_imull_EAX_T0();
3573 s->cc_op = CC_OP_MULL;
3574 break;
3575 #ifdef TARGET_X86_64
3576 case OT_QUAD:
3577 gen_op_imulq_EAX_T0();
3578 s->cc_op = CC_OP_MULQ;
3579 break;
3580 #endif
3581 }
3582 break;
3583 case 6: /* div */
3584 switch(ot) {
3585 case OT_BYTE:
3586 gen_jmp_im(pc_start - s->cs_base);
3587 gen_op_divb_AL_T0();
3588 break;
3589 case OT_WORD:
3590 gen_jmp_im(pc_start - s->cs_base);
3591 gen_op_divw_AX_T0();
3592 break;
3593 default:
3594 case OT_LONG:
3595 gen_jmp_im(pc_start - s->cs_base);
3596 #ifdef MACRO_TEST
3597 /* XXX: this is just a test */
3598 tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3599 #else
3600 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3601 #endif
3602 break;
3603 #ifdef TARGET_X86_64
3604 case OT_QUAD:
3605 gen_jmp_im(pc_start - s->cs_base);
3606 gen_op_divq_EAX_T0();
3607 break;
3608 #endif
3609 }
3610 break;
3611 case 7: /* idiv */
3612 switch(ot) {
3613 case OT_BYTE:
3614 gen_jmp_im(pc_start - s->cs_base);
3615 gen_op_idivb_AL_T0();
3616 break;
3617 case OT_WORD:
3618 gen_jmp_im(pc_start - s->cs_base);
3619 gen_op_idivw_AX_T0();
3620 break;
3621 default:
3622 case OT_LONG:
3623 gen_jmp_im(pc_start - s->cs_base);
3624 tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3625 break;
3626 #ifdef TARGET_X86_64
3627 case OT_QUAD:
3628 gen_jmp_im(pc_start - s->cs_base);
3629 gen_op_idivq_EAX_T0();
3630 break;
3631 #endif
3632 }
3633 break;
3634 default:
3635 goto illegal_op;
3636 }
3637 break;
3638
3639 case 0xfe: /* GRP4 */
3640 case 0xff: /* GRP5 */
3641 if ((b & 1) == 0)
3642 ot = OT_BYTE;
3643 else
3644 ot = dflag + OT_WORD;
3645
3646 modrm = ldub_code(s->pc++);
3647 mod = (modrm >> 6) & 3;
3648 rm = (modrm & 7) | REX_B(s);
3649 op = (modrm >> 3) & 7;
3650 if (op >= 2 && b == 0xfe) {
3651 goto illegal_op;
3652 }
3653 if (CODE64(s)) {
3654 if (op == 2 || op == 4) {
3655 /* operand size for jumps is 64 bit */
3656 ot = OT_QUAD;
3657 } else if (op == 3 || op == 5) {
3658 /* for call calls, the operand is 16 or 32 bit, even
3659 in long mode */
3660 ot = dflag ? OT_LONG : OT_WORD;
3661 } else if (op == 6) {
3662 /* default push size is 64 bit */
3663 ot = dflag ? OT_QUAD : OT_WORD;
3664 }
3665 }
3666 if (mod != 3) {
3667 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3668 if (op >= 2 && op != 3 && op != 5)
3669 gen_op_ld_T0_A0(ot + s->mem_index);
3670 } else {
3671 gen_op_mov_TN_reg(ot, 0, rm);
3672 }
3673
3674 switch(op) {
3675 case 0: /* inc Ev */
3676 if (mod != 3)
3677 opreg = OR_TMP0;
3678 else
3679 opreg = rm;
3680 gen_inc(s, ot, opreg, 1);
3681 break;
3682 case 1: /* dec Ev */
3683 if (mod != 3)
3684 opreg = OR_TMP0;
3685 else
3686 opreg = rm;
3687 gen_inc(s, ot, opreg, -1);
3688 break;
3689 case 2: /* call Ev */
3690 /* XXX: optimize if memory (no 'and' is necessary) */
3691 if (s->dflag == 0)
3692 gen_op_andl_T0_ffff();
3693 next_eip = s->pc - s->cs_base;
3694 gen_movtl_T1_im(next_eip);
3695 gen_push_T1(s);
3696 gen_op_jmp_T0();
3697 gen_eob(s);
3698 break;
3699 case 3: /* lcall Ev */
3700 gen_op_ld_T1_A0(ot + s->mem_index);
3701 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3702 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3703 do_lcall:
3704 if (s->pe && !s->vm86) {
3705 if (s->cc_op != CC_OP_DYNAMIC)
3706 gen_op_set_cc_op(s->cc_op);
3707 gen_jmp_im(pc_start - s->cs_base);
3708 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3709 } else {
3710 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3711 }
3712 gen_eob(s);
3713 break;
3714 case 4: /* jmp Ev */
3715 if (s->dflag == 0)
3716 gen_op_andl_T0_ffff();
3717 gen_op_jmp_T0();
3718 gen_eob(s);
3719 break;
3720 case 5: /* ljmp Ev */
3721 gen_op_ld_T1_A0(ot + s->mem_index);
3722 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3723 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3724 do_ljmp:
3725 if (s->pe && !s->vm86) {
3726 if (s->cc_op != CC_OP_DYNAMIC)
3727 gen_op_set_cc_op(s->cc_op);
3728 gen_jmp_im(pc_start - s->cs_base);
3729 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3730 } else {
3731 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3732 gen_op_movl_T0_T1();
3733 gen_op_jmp_T0();
3734 }
3735 gen_eob(s);
3736 break;
3737 case 6: /* push Ev */
3738 gen_push_T0(s);
3739 break;
3740 default:
3741 goto illegal_op;
3742 }
3743 break;
3744
3745 case 0x84: /* test Ev, Gv */
3746 case 0x85:
3747 if ((b & 1) == 0)
3748 ot = OT_BYTE;
3749 else
3750 ot = dflag + OT_WORD;
3751
3752 modrm = ldub_code(s->pc++);
3753 mod = (modrm >> 6) & 3;
3754 rm = (modrm & 7) | REX_B(s);
3755 reg = ((modrm >> 3) & 7) | rex_r;
3756
3757 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3758 gen_op_mov_TN_reg(ot, 1, reg);
3759 gen_op_testl_T0_T1_cc();
3760 s->cc_op = CC_OP_LOGICB + ot;
3761 break;
3762
3763 case 0xa8: /* test eAX, Iv */
3764 case 0xa9:
3765 if ((b & 1) == 0)
3766 ot = OT_BYTE;
3767 else
3768 ot = dflag + OT_WORD;
3769 val = insn_get(s, ot);
3770
3771 gen_op_mov_TN_reg(ot, 0, OR_EAX);
3772 gen_op_movl_T1_im(val);
3773 gen_op_testl_T0_T1_cc();
3774 s->cc_op = CC_OP_LOGICB + ot;
3775 break;
3776
3777         case 0x98: /* CWDE/CBW */
3778 #ifdef TARGET_X86_64
3779             if (dflag == 2) {
3780                 gen_op_movslq_RAX_EAX(); /* CDQE: sign-extend EAX into RAX */
3781             } else
3782 #endif
3783             if (dflag == 1)
3784                 gen_op_movswl_EAX_AX(); /* CWDE: sign-extend AX into EAX */
3785             else
3786                 gen_op_movsbw_AX_AL(); /* CBW: sign-extend AL into AX */
3787             break;
3788         case 0x99: /* CDQ/CWD */
3789 #ifdef TARGET_X86_64
3790             if (dflag == 2) {
3791                 gen_op_movsqo_RDX_RAX(); /* CQO: sign-extend RAX into RDX:RAX */
3792             } else
3793 #endif
3794             if (dflag == 1)
3795                 gen_op_movslq_EDX_EAX(); /* CDQ: sign-extend EAX into EDX:EAX */
3796             else
3797                 gen_op_movswl_DX_AX(); /* CWD: sign-extend AX into DX:AX */
3798             break;
3799 case 0x1af: /* imul Gv, Ev */
3800 case 0x69: /* imul Gv, Ev, I */
3801 case 0x6b:
3802 ot = dflag + OT_WORD;
3803 modrm = ldub_code(s->pc++);
3804 reg = ((modrm >> 3) & 7) | rex_r;
3805 if (b == 0x69)
3806 s->rip_offset = insn_const_size(ot);
3807 else if (b == 0x6b)
3808 s->rip_offset = 1;
3809 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3810 if (b == 0x69) {
3811 val = insn_get(s, ot);
3812 gen_op_movl_T1_im(val);
3813 } else if (b == 0x6b) {
3814 val = (int8_t)insn_get(s, OT_BYTE);
3815 gen_op_movl_T1_im(val);
3816 } else {
3817 gen_op_mov_TN_reg(ot, 1, reg);
3818 }
3819
3820 #ifdef TARGET_X86_64
3821 if (ot == OT_QUAD) {
3822 gen_op_imulq_T0_T1();
3823 } else
3824 #endif
3825 if (ot == OT_LONG) {
3826 gen_op_imull_T0_T1();
3827 } else {
3828 gen_op_imulw_T0_T1();
3829 }
3830 gen_op_mov_reg_T0(ot, reg);
3831 s->cc_op = CC_OP_MULB + ot;
3832 break;
3833 case 0x1c0:
3834 case 0x1c1: /* xadd Ev, Gv */
3835 if ((b & 1) == 0)
3836 ot = OT_BYTE;
3837 else
3838 ot = dflag + OT_WORD;
3839 modrm = ldub_code(s->pc++);
3840 reg = ((modrm >> 3) & 7) | rex_r;
3841 mod = (modrm >> 6) & 3;
3842 if (mod == 3) {
3843 rm = (modrm & 7) | REX_B(s);
3844 gen_op_mov_TN_reg(ot, 0, reg);
3845 gen_op_mov_TN_reg(ot, 1, rm);
3846 gen_op_addl_T0_T1();
3847 gen_op_mov_reg_T1(ot, reg);
3848 gen_op_mov_reg_T0(ot, rm);
3849 } else {
3850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3851 gen_op_mov_TN_reg(ot, 0, reg);
3852 gen_op_ld_T1_A0(ot + s->mem_index);
3853 gen_op_addl_T0_T1();
3854 gen_op_st_T0_A0(ot + s->mem_index);
3855 gen_op_mov_reg_T1(ot, reg);
3856 }
3857 gen_op_update2_cc();
3858 s->cc_op = CC_OP_ADDB + ot;
3859 break;
3860         case 0x1b0:
3861         case 0x1b1: /* cmpxchg Ev, Gv */
3862             if ((b & 1) == 0)
3863                 ot = OT_BYTE;
3864             else
3865                 ot = dflag + OT_WORD; /* operand size from the data-size prefix/REX.W */
3866             modrm = ldub_code(s->pc++);
3867             reg = ((modrm >> 3) & 7) | rex_r;
3868             mod = (modrm >> 6) & 3;
3869             gen_op_mov_TN_reg(ot, 1, reg); /* T1 = replacement value (Gv) */
3870             if (mod == 3) {
3871                 rm = (modrm & 7) | REX_B(s);
3872                 gen_op_mov_TN_reg(ot, 0, rm); /* T0 = current destination value */
3873                 gen_op_cmpxchg_T0_T1_EAX_cc[ot](); /* compare with accumulator, select result, set flags */
3874                 gen_op_mov_reg_T0(ot, rm); /* write back (micro-op leaves result in T0) */
3875             } else {
3876                 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3877                 gen_op_ld_T0_A0(ot + s->mem_index); /* T0 = memory operand at A0 */
3878                 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index](); /* memory variant also performs the store */
3879             }
3880             s->cc_op = CC_OP_SUBB + ot; /* flags as for CMP (a subtract) of that width */
3881             break;
3882         case 0x1c7: /* cmpxchg8b */
3883             modrm = ldub_code(s->pc++);
3884             mod = (modrm >> 6) & 3;
3885             if ((mod == 3) || ((modrm & 0x38) != 0x8)) /* only /1 with a memory operand is valid */
3886                 goto illegal_op;
3887             gen_jmp_im(pc_start - s->cs_base); /* sync EIP first: the helper's memory access may fault */
3888             if (s->cc_op != CC_OP_DYNAMIC)
3889                 gen_op_set_cc_op(s->cc_op); /* flush lazy flags; helper updates ZF itself */
3890             gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3891             gen_op_cmpxchg8b();
3892             s->cc_op = CC_OP_EFLAGS; /* flags now live in env->eflags */
3893             break;
3894
3895 /**************************/
3896 /* push/pop */
3897 case 0x50 ... 0x57: /* push */
3898 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3899 gen_push_T0(s);
3900 break;
3901 case 0x58 ... 0x5f: /* pop */
3902 if (CODE64(s)) {
3903 ot = dflag ? OT_QUAD : OT_WORD;
3904 } else {
3905 ot = dflag + OT_WORD;
3906 }
3907 gen_pop_T0(s);
3908 /* NOTE: order is important for pop %sp */
3909 gen_pop_update(s);
3910 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3911 break;
3912 case 0x60: /* pusha */
3913 if (CODE64(s))
3914 goto illegal_op;
3915 gen_pusha(s);
3916 break;
3917 case 0x61: /* popa */
3918 if (CODE64(s))
3919 goto illegal_op;
3920 gen_popa(s);
3921 break;
3922 case 0x68: /* push Iv */
3923 case 0x6a:
3924 if (CODE64(s)) {
3925 ot = dflag ? OT_QUAD : OT_WORD;
3926 } else {
3927 ot = dflag + OT_WORD;
3928 }
3929 if (b == 0x68)
3930 val = insn_get(s, ot);
3931 else
3932 val = (int8_t)insn_get(s, OT_BYTE);
3933 gen_op_movl_T0_im(val);
3934 gen_push_T0(s);
3935 break;
3936 case 0x8f: /* pop Ev */
3937 if (CODE64(s)) {
3938 ot = dflag ? OT_QUAD : OT_WORD;
3939 } else {
3940 ot = dflag + OT_WORD;
3941 }
3942 modrm = ldub_code(s->pc++);
3943 mod = (modrm >> 6) & 3;
3944 gen_pop_T0(s);
3945 if (mod == 3) {
3946 /* NOTE: order is important for pop %sp */
3947 gen_pop_update(s);
3948 rm = (modrm & 7) | REX_B(s);
3949 gen_op_mov_reg_T0(ot, rm);
3950 } else {
3951 /* NOTE: order is important too for MMU exceptions */
3952 s->popl_esp_hack = 1 << ot;
3953 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3954 s->popl_esp_hack = 0;
3955 gen_pop_update(s);
3956 }
3957 break;
3958 case 0xc8: /* enter */
3959 {
3960 int level;
3961 val = lduw_code(s->pc);
3962 s->pc += 2;
3963 level = ldub_code(s->pc++);
3964 gen_enter(s, val, level);
3965 }
3966 break;
3967         case 0xc9: /* leave */
3968             /* XXX: exception not precise (ESP is updated before potential exception) */
3969             if (CODE64(s)) {
3970                 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP); /* SP := BP, full 64 bits in long mode */
3971                 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
3972             } else if (s->ss32) {
3973                 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP); /* 32-bit stack segment: ESP := EBP */
3974                 gen_op_mov_reg_T0(OT_LONG, R_ESP);
3975             } else {
3976                 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP); /* 16-bit stack segment: SP := BP */
3977                 gen_op_mov_reg_T0(OT_WORD, R_ESP);
3978             }
3979             gen_pop_T0(s); /* T0 = saved frame pointer popped from the new SP */
3980             if (CODE64(s)) {
3981                 ot = dflag ? OT_QUAD : OT_WORD; /* pop size defaults to 64 bit in long mode */
3982             } else {
3983                 ot = dflag + OT_WORD;
3984             }
3985             gen_op_mov_reg_T0(ot, R_EBP); /* restore caller's BP at the operand size */
3986             gen_pop_update(s); /* finally advance SP past the popped slot */
3987             break;
3988 case 0x06: /* push es */
3989 case 0x0e: /* push cs */
3990 case 0x16: /* push ss */
3991 case 0x1e: /* push ds */
3992 if (CODE64(s))
3993 goto illegal_op;
3994 gen_op_movl_T0_seg(b >> 3);
3995 gen_push_T0(s);
3996 break;
3997 case 0x1a0: /* push fs */
3998 case 0x1a8: /* push gs */
3999 gen_op_movl_T0_seg((b >> 3) & 7);
4000 gen_push_T0(s);
4001 break;
4002 case 0x07: /* pop es */
4003 case 0x17: /* pop ss */
4004 case 0x1f: /* pop ds */
4005 if (CODE64(s))
4006 goto illegal_op;
4007 reg = b >> 3;
4008 gen_pop_T0(s);
4009 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4010 gen_pop_update(s);
4011 if (reg == R_SS) {
4012 /* if reg == SS, inhibit interrupts/trace. */
4013 /* If several instructions disable interrupts, only the
4014 _first_ does it */
4015 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4016 gen_op_set_inhibit_irq();
4017 s->tf = 0;
4018 }
4019 if (s->is_jmp) {
4020 gen_jmp_im(s->pc - s->cs_base);
4021 gen_eob(s);
4022 }
4023 break;
4024 case 0x1a1: /* pop fs */
4025 case 0x1a9: /* pop gs */
4026 gen_pop_T0(s);
4027 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4028 gen_pop_update(s);
4029 if (s->is_jmp) {
4030 gen_jmp_im(s->pc - s->cs_base);
4031 gen_eob(s);
4032 }
4033 break;
4034
4035 /**************************/
4036 /* mov */
4037 case 0x88:
4038 case 0x89: /* mov Gv, Ev */
4039 if ((b & 1) == 0)
4040 ot = OT_BYTE;
4041 else
4042 ot = dflag + OT_WORD;
4043 modrm = ldub_code(s->pc++);
4044 reg = ((modrm >> 3) & 7) | rex_r;
4045
4046 /* generate a generic store */
4047 gen_ldst_modrm(s, modrm, ot, reg, 1);
4048 break;
4049 case 0xc6:
4050 case 0xc7: /* mov Ev, Iv */
4051 if ((b & 1) == 0)
4052 ot = OT_BYTE;
4053 else
4054 ot = dflag + OT_WORD;
4055 modrm = ldub_code(s->pc++);
4056 mod = (modrm >> 6) & 3;
4057 if (mod != 3) {
4058 s->rip_offset = insn_const_size(ot);
4059 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4060 }
4061 val = insn_get(s, ot);
4062 gen_op_movl_T0_im(val);
4063 if (mod != 3)
4064 gen_op_st_T0_A0(ot + s->mem_index);
4065 else
4066 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4067 break;
4068 case 0x8a:
4069 case 0x8b: /* mov Ev, Gv */
4070 if ((b & 1) == 0)
4071 ot = OT_BYTE;
4072 else
4073 ot = OT_WORD + dflag;
4074 modrm = ldub_code(s->pc++);
4075 reg = ((modrm >> 3) & 7) | rex_r;
4076
4077 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4078 gen_op_mov_reg_T0(ot, reg);
4079 break;
4080 case 0x8e: /* mov seg, Gv */
4081 modrm = ldub_code(s->pc++);
4082 reg = (modrm >> 3) & 7;
4083 if (reg >= 6 || reg == R_CS)
4084 goto illegal_op;
4085 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4086 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4087 if (reg == R_SS) {
4088 /* if reg == SS, inhibit interrupts/trace */
4089 /* If several instructions disable interrupts, only the
4090 _first_ does it */
4091 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4092 gen_op_set_inhibit_irq();
4093 s->tf = 0;
4094 }
4095 if (s->is_jmp) {
4096 gen_jmp_im(s->pc - s->cs_base);
4097 gen_eob(s);
4098 }
4099 break;
4100 case 0x8c: /* mov Gv, seg */
4101 modrm = ldub_code(s->pc++);
4102 reg = (modrm >> 3) & 7;
4103 mod = (modrm >> 6) & 3;
4104 if (reg >= 6)
4105 goto illegal_op;
4106 gen_op_movl_T0_seg(reg);
4107 if (mod == 3)
4108 ot = OT_WORD + dflag;
4109 else
4110 ot = OT_WORD;
4111 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4112 break;
4113
4114 case 0x1b6: /* movzbS Gv, Eb */
4115 case 0x1b7: /* movzwS Gv, Eb */
4116 case 0x1be: /* movsbS Gv, Eb */
4117 case 0x1bf: /* movswS Gv, Eb */
4118 {
4119 int d_ot;
4120 /* d_ot is the size of destination */
4121 d_ot = dflag + OT_WORD;
4122 /* ot is the size of source */
4123 ot = (b & 1) + OT_BYTE;
4124 modrm = ldub_code(s->pc++);
4125 reg = ((modrm >> 3) & 7) | rex_r;
4126 mod = (modrm >> 6) & 3;
4127 rm = (modrm & 7) | REX_B(s);
4128
4129 if (mod == 3) {
4130 gen_op_mov_TN_reg(ot, 0, rm);
4131 switch(ot | (b & 8)) {
4132 case OT_BYTE:
4133 gen_op_movzbl_T0_T0();
4134 break;
4135 case OT_BYTE | 8:
4136 gen_op_movsbl_T0_T0();
4137 break;
4138 case OT_WORD:
4139 gen_op_movzwl_T0_T0();
4140 break;
4141 default:
4142 case OT_WORD | 8:
4143 gen_op_movswl_T0_T0();
4144 break;
4145 }
4146 gen_op_mov_reg_T0(d_ot, reg);
4147 } else {
4148 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4149 if (b & 8) {
4150 gen_op_lds_T0_A0(ot + s->mem_index);
4151 } else {
4152 gen_op_ldu_T0_A0(ot + s->mem_index);
4153 }
4154 gen_op_mov_reg_T0(d_ot, reg);
4155 }
4156 }
4157 break;
4158
4159         case 0x8d: /* lea */
4160             ot = dflag + OT_WORD;
4161             modrm = ldub_code(s->pc++);
4162             mod = (modrm >> 6) & 3;
4163             if (mod == 3) /* LEA requires a memory-form modrm; register form is #UD */
4164                 goto illegal_op;
4165             reg = ((modrm >> 3) & 7) | rex_r;
4166             /* we must ensure that no segment is added */
4167             s->override = -1; /* drop any segment-override prefix */
4168             val = s->addseg; /* save addseg so it can be restored after the lea */
4169             s->addseg = 0; /* suppress implicit segment base addition too */
4170             gen_lea_modrm(s, modrm, &reg_addr, &offset_addr); /* compute effective address into A0 */
4171             s->addseg = val;
4172             gen_op_mov_reg_A0(ot - OT_WORD, reg); /* dest reg = raw offset, truncated to operand size */
4173             break;
4174
4175 case 0xa0: /* mov EAX, Ov */
4176 case 0xa1:
4177 case 0xa2: /* mov Ov, EAX */
4178 case 0xa3:
4179 {
4180 target_ulong offset_addr;
4181
4182 if ((b & 1) == 0)
4183 ot = OT_BYTE;
4184 else
4185 ot = dflag + OT_WORD;
4186 #ifdef TARGET_X86_64
4187 if (s->aflag == 2) {
4188 offset_addr = ldq_code(s->pc);
4189 s->pc += 8;
4190 gen_op_movq_A0_im(offset_addr);
4191 } else
4192 #endif
4193 {
4194 if (s->aflag) {
4195 offset_addr = insn_get(s, OT_LONG);
4196 } else {
4197 offset_addr = insn_get(s, OT_WORD);
4198 }
4199 gen_op_movl_A0_im(offset_addr);
4200 }
4201 gen_add_A0_ds_seg(s);
4202 if ((b & 2) == 0) {
4203 gen_op_ld_T0_A0(ot + s->mem_index);
4204 gen_op_mov_reg_T0(ot, R_EAX);
4205 } else {
4206 gen_op_mov_TN_reg(ot, 0, R_EAX);
4207 gen_op_st_T0_A0(ot + s->mem_index);
4208 }
4209 }
4210 break;
4211         case 0xd7: /* xlat */
4212 #ifdef TARGET_X86_64
4213             if (s->aflag == 2) {
4214                 gen_op_movq_A0_reg(R_EBX); /* 64-bit addressing: A0 = RBX */
4215                 gen_op_addq_A0_AL(); /* A0 += zero-extended AL (table index) */
4216             } else
4217 #endif
4218             {
4219                 gen_op_movl_A0_reg(R_EBX); /* A0 = (E)BX */
4220                 gen_op_addl_A0_AL(); /* A0 += AL */
4221                 if (s->aflag == 0)
4222                     gen_op_andl_A0_ffff(); /* 16-bit addressing: wrap the address to 64K */
4223             }
4224             gen_add_A0_ds_seg(s); /* add DS base (honours segment override) */
4225             gen_op_ldu_T0_A0(OT_BYTE + s->mem_index); /* fetch table byte */
4226             gen_op_mov_reg_T0(OT_BYTE, R_EAX); /* AL = table[AL] */
4227             break;
4228 case 0xb0 ... 0xb7: /* mov R, Ib */
4229 val = insn_get(s, OT_BYTE);
4230 gen_op_movl_T0_im(val);
4231 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4232 break;
4233 case 0xb8 ... 0xbf: /* mov R, Iv */
4234 #ifdef TARGET_X86_64
4235 if (dflag == 2) {
4236 uint64_t tmp;
4237 /* 64 bit case */
4238 tmp = ldq_code(s->pc);
4239 s->pc += 8;
4240 reg = (b & 7) | REX_B(s);
4241 gen_movtl_T0_im(tmp);
4242 gen_op_mov_reg_T0(OT_QUAD, reg);
4243 } else
4244 #endif
4245 {
4246 ot = dflag ? OT_LONG : OT_WORD;
4247 val = insn_get(s, ot);
4248 reg = (b & 7) | REX_B(s);
4249 gen_op_movl_T0_im(val);
4250 gen_op_mov_reg_T0(ot, reg);
4251 }
4252 break;
4253
4254 case 0x91 ... 0x97: /* xchg R, EAX */
4255 ot = dflag + OT_WORD;
4256 reg = (b & 7) | REX_B(s);
4257 rm = R_EAX;
4258 goto do_xchg_reg;
4259 case 0x86:
4260 case 0x87: /* xchg Ev, Gv */
4261 if ((b & 1) == 0)
4262 ot = OT_BYTE;
4263 else
4264 ot = dflag + OT_WORD;
4265 modrm = ldub_code(s->pc++);
4266 reg = ((modrm >> 3) & 7) | rex_r;
4267 mod = (modrm >> 6) & 3;
4268 if (mod == 3) {
4269 rm = (modrm & 7) | REX_B(s);
4270 do_xchg_reg:
4271 gen_op_mov_TN_reg(ot, 0, reg);
4272 gen_op_mov_TN_reg(ot, 1, rm);
4273 gen_op_mov_reg_T0(ot, rm);
4274 gen_op_mov_reg_T1(ot, reg);
4275 } else {
4276 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4277 gen_op_mov_TN_reg(ot, 0, reg);
4278 /* for xchg, lock is implicit */
4279 if (!(prefixes & PREFIX_LOCK))
4280 gen_op_lock();
4281 gen_op_ld_T1_A0(ot + s->mem_index);
4282 gen_op_st_T0_A0(ot + s->mem_index);
4283 if (!(prefixes & PREFIX_LOCK))
4284 gen_op_unlock();
4285 gen_op_mov_reg_T1(ot, reg);
4286 }
4287 break;
4288 case 0xc4: /* les Gv */
4289 if (CODE64(s))
4290 goto illegal_op;
4291 op = R_ES;
4292 goto do_lxx;
4293 case 0xc5: /* lds Gv */
4294 if (CODE64(s))
4295 goto illegal_op;
4296 op = R_DS;
4297 goto do_lxx;
4298 case 0x1b2: /* lss Gv */
4299 op = R_SS;
4300 goto do_lxx;
4301 case 0x1b4: /* lfs Gv */
4302 op = R_FS;
4303 goto do_lxx;
4304 case 0x1b5: /* lgs Gv */
4305 op = R_GS;
4306 do_lxx:
4307 ot = dflag ? OT_LONG : OT_WORD;
4308 modrm = ldub_code(s->pc++);
4309 reg = ((modrm >> 3) & 7) | rex_r;
4310 mod = (modrm >> 6) & 3;
4311 if (mod == 3)
4312 goto illegal_op;
4313 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4314 gen_op_ld_T1_A0(ot + s->mem_index);
4315 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4316 /* load the segment first to handle exceptions properly */
4317 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4318 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4319 /* then put the data */
4320 gen_op_mov_reg_T1(ot, reg);
4321 if (s->is_jmp) {
4322 gen_jmp_im(s->pc - s->cs_base);
4323 gen_eob(s);
4324 }
4325 break;
4326
4327 /************************/
4328 /* shifts */
4329 case 0xc0:
4330 case 0xc1:
4331 /* shift Ev,Ib */
4332 shift = 2;
4333 grp2:
4334 {
4335 if ((b & 1) == 0)
4336 ot = OT_BYTE;
4337 else
4338 ot = dflag + OT_WORD;
4339
4340 modrm = ldub_code(s->pc++);
4341 mod = (modrm >> 6) & 3;
4342 op = (modrm >> 3) & 7;
4343
4344 if (mod != 3) {
4345 if (shift == 2) {
4346 s->rip_offset = 1;
4347 }
4348 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4349 opreg = OR_TMP0;
4350 } else {
4351 opreg = (modrm & 7) | REX_B(s);
4352 }
4353
4354 /* simpler op */
4355 if (shift == 0) {
4356 gen_shift(s, op, ot, opreg, OR_ECX);
4357 } else {
4358 if (shift == 2) {
4359 shift = ldub_code(s->pc++);
4360 }
4361 gen_shifti(s, op, ot, opreg, shift);
4362 }
4363 }
4364 break;
4365 case 0xd0:
4366 case 0xd1:
4367 /* shift Ev,1 */
4368 shift = 1;
4369 goto grp2;
4370 case 0xd2:
4371 case 0xd3:
4372 /* shift Ev,cl */
4373 shift = 0;
4374 goto grp2;
4375
4376 case 0x1a4: /* shld imm */
4377 op = 0;
4378 shift = 1;
4379 goto do_shiftd;
4380 case 0x1a5: /* shld cl */
4381 op = 0;
4382 shift = 0;
4383 goto do_shiftd;
4384 case 0x1ac: /* shrd imm */
4385 op = 1;
4386 shift = 1;
4387 goto do_shiftd;
4388 case 0x1ad: /* shrd cl */
4389 op = 1;
4390 shift = 0;
4391 do_shiftd:
4392 ot = dflag + OT_WORD;
4393 modrm = ldub_code(s->pc++);
4394 mod = (modrm >> 6) & 3;
4395 rm = (modrm & 7) | REX_B(s);
4396 reg = ((modrm >> 3) & 7) | rex_r;
4397
4398 if (mod != 3) {
4399 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4400 gen_op_ld_T0_A0(ot + s->mem_index);
4401 } else {
4402 gen_op_mov_TN_reg(ot, 0, rm);
4403 }
4404 gen_op_mov_TN_reg(ot, 1, reg);
4405
4406 if (shift) {
4407 val = ldub_code(s->pc++);
4408 if (ot == OT_QUAD)
4409 val &= 0x3f;
4410 else
4411 val &= 0x1f;
4412 if (val) {
4413 if (mod == 3)
4414 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4415 else
4416 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4417 if (op == 0 && ot != OT_WORD)
4418 s->cc_op = CC_OP_SHLB + ot;
4419 else
4420 s->cc_op = CC_OP_SARB + ot;
4421 }
4422 } else {
4423 if (s->cc_op != CC_OP_DYNAMIC)
4424 gen_op_set_cc_op(s->cc_op);
4425 if (mod == 3)
4426 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4427 else
4428 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4429 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4430 }
4431 if (mod == 3) {
4432 gen_op_mov_reg_T0(ot, rm);
4433 }
4434 break;
4435
4436 /************************/
4437 /* floats */
4438 case 0xd8 ... 0xdf:
4439 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4440 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4441 /* XXX: what to do if illegal op ? */
4442 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4443 break;
4444 }
4445 modrm = ldub_code(s->pc++);
4446 mod = (modrm >> 6) & 3;
4447 rm = modrm & 7;
4448 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4449 if (mod != 3) {
4450 /* memory op */
4451 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4452 switch(op) {
4453 case 0x00 ... 0x07: /* fxxxs */
4454 case 0x10 ... 0x17: /* fixxxl */
4455 case 0x20 ... 0x27: /* fxxxl */
4456 case 0x30 ... 0x37: /* fixxx */
4457 {
4458 int op1;
4459 op1 = op & 7;
4460
4461 switch(op >> 4) {
4462 case 0:
4463 gen_op_flds_FT0_A0();
4464 break;
4465 case 1:
4466 gen_op_fildl_FT0_A0();
4467 break;
4468 case 2:
4469 gen_op_fldl_FT0_A0();
4470 break;
4471 case 3:
4472 default:
4473 gen_op_fild_FT0_A0();
4474 break;
4475 }
4476
4477 gen_op_fp_arith_ST0_FT0[op1]();
4478 if (op1 == 3) {
4479 /* fcomp needs pop */
4480 gen_op_fpop();
4481 }
4482 }
4483 break;
4484 case 0x08: /* flds */
4485 case 0x0a: /* fsts */
4486 case 0x0b: /* fstps */
4487 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4488 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4489 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4490 switch(op & 7) {
4491 case 0:
4492 switch(op >> 4) {
4493 case 0:
4494 gen_op_flds_ST0_A0();
4495 break;
4496 case 1:
4497 gen_op_fildl_ST0_A0();
4498 break;
4499 case 2:
4500 gen_op_fldl_ST0_A0();
4501 break;
4502 case 3:
4503 default:
4504 gen_op_fild_ST0_A0();
4505 break;
4506 }
4507 break;
4508 case 1:
4509 switch(op >> 4) {
4510 case 1:
4511 gen_op_fisttl_ST0_A0();
4512 break;
4513 case 2:
4514 gen_op_fisttll_ST0_A0();
4515 break;
4516 case 3:
4517 default:
4518 gen_op_fistt_ST0_A0();
4519 }
4520 gen_op_fpop();
4521 break;
4522 default:
4523 switch(op >> 4) {
4524 case 0:
4525 gen_op_fsts_ST0_A0();
4526 break;
4527 case 1:
4528 gen_op_fistl_ST0_A0();
4529 break;
4530 case 2:
4531 gen_op_fstl_ST0_A0();
4532 break;
4533 case 3:
4534 default:
4535 gen_op_fist_ST0_A0();
4536 break;
4537 }
4538 if ((op & 7) == 3)
4539 gen_op_fpop();
4540 break;
4541 }
4542 break;
4543 case 0x0c: /* fldenv mem */
4544 gen_op_fldenv_A0(s->dflag);
4545 break;
4546 case 0x0d: /* fldcw mem */
4547 gen_op_fldcw_A0();
4548 break;
4549 case 0x0e: /* fnstenv mem */
4550 gen_op_fnstenv_A0(s->dflag);
4551 break;
4552 case 0x0f: /* fnstcw mem */
4553 gen_op_fnstcw_A0();
4554 break;
4555 case 0x1d: /* fldt mem */
4556 gen_op_fldt_ST0_A0();
4557 break;
4558 case 0x1f: /* fstpt mem */
4559 gen_op_fstt_ST0_A0();
4560 gen_op_fpop();
4561 break;
4562 case 0x2c: /* frstor mem */
4563 gen_op_frstor_A0(s->dflag);
4564 break;
4565 case 0x2e: /* fnsave mem */
4566 gen_op_fnsave_A0(s->dflag);
4567 break;
4568 case 0x2f: /* fnstsw mem */
4569 gen_op_fnstsw_A0();
4570 break;
4571 case 0x3c: /* fbld */
4572 gen_op_fbld_ST0_A0();
4573 break;
4574 case 0x3e: /* fbstp */
4575 gen_op_fbst_ST0_A0();
4576 gen_op_fpop();
4577 break;
4578 case 0x3d: /* fildll */
4579 gen_op_fildll_ST0_A0();
4580 break;
4581 case 0x3f: /* fistpll */
4582 gen_op_fistll_ST0_A0();
4583 gen_op_fpop();
4584 break;
4585 default:
4586 goto illegal_op;
4587 }
4588 } else {
4589 /* register float ops */
4590 opreg = rm;
4591
4592 switch(op) {
4593 case 0x08: /* fld sti */
4594 gen_op_fpush();
4595 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4596 break;
4597 case 0x09: /* fxchg sti */
4598 case 0x29: /* fxchg4 sti, undocumented op */
4599 case 0x39: /* fxchg7 sti, undocumented op */
4600 gen_op_fxchg_ST0_STN(opreg);
4601 break;
4602 case 0x0a: /* grp d9/2 */
4603 switch(rm) {
4604 case 0: /* fnop */
4605 /* check exceptions (FreeBSD FPU probe) */
4606 if (s->cc_op != CC_OP_DYNAMIC)
4607 gen_op_set_cc_op(s->cc_op);
4608 gen_jmp_im(pc_start - s->cs_base);
4609 gen_op_fwait();
4610 break;
4611 default:
4612 goto illegal_op;
4613 }
4614 break;
4615 case 0x0c: /* grp d9/4 */
4616 switch(rm) {
4617 case 0: /* fchs */
4618 gen_op_fchs_ST0();
4619 break;
4620 case 1: /* fabs */
4621 gen_op_fabs_ST0();
4622 break;
4623 case 4: /* ftst */
4624 gen_op_fldz_FT0();
4625 gen_op_fcom_ST0_FT0();
4626 break;
4627 case 5: /* fxam */
4628 gen_op_fxam_ST0();
4629 break;
4630 default:
4631 goto illegal_op;
4632 }
4633 break;
4634 case 0x0d: /* grp d9/5 */
4635 {
4636 switch(rm) {
4637 case 0:
4638 gen_op_fpush();
4639 gen_op_fld1_ST0();
4640 break;
4641 case 1:
4642 gen_op_fpush();
4643 gen_op_fldl2t_ST0();
4644 break;
4645 case 2:
4646 gen_op_fpush();
4647 gen_op_fldl2e_ST0();
4648 break;
4649 case 3:
4650 gen_op_fpush();
4651 gen_op_fldpi_ST0();
4652 break;
4653 case 4:
4654 gen_op_fpush();
4655 gen_op_fldlg2_ST0();
4656 break;
4657 case 5:
4658 gen_op_fpush();
4659 gen_op_fldln2_ST0();
4660 break;
4661 case 6:
4662 gen_op_fpush();
4663 gen_op_fldz_ST0();
4664 break;
4665 default:
4666 goto illegal_op;
4667 }
4668 }
4669 break;
4670 case 0x0e: /* grp d9/6 */
4671 switch(rm) {
4672 case 0: /* f2xm1 */
4673 gen_op_f2xm1();
4674 break;
4675 case 1: /* fyl2x */
4676 gen_op_fyl2x();
4677 break;
4678 case 2: /* fptan */
4679 gen_op_fptan();
4680 break;
4681 case 3: /* fpatan */
4682 gen_op_fpatan();
4683 break;
4684 case 4: /* fxtract */
4685 gen_op_fxtract();
4686 break;
4687 case 5: /* fprem1 */
4688 gen_op_fprem1();
4689 break;
4690 case 6: /* fdecstp */
4691 gen_op_fdecstp();
4692 break;
4693 default:
4694 case 7: /* fincstp */
4695 gen_op_fincstp();
4696 break;
4697 }
4698 break;
4699 case 0x0f: /* grp d9/7 */
4700 switch(rm) {
4701 case 0: /* fprem */
4702 gen_op_fprem();
4703 break;
4704 case 1: /* fyl2xp1 */
4705 gen_op_fyl2xp1();
4706 break;
4707 case 2: /* fsqrt */
4708 gen_op_fsqrt();
4709 break;
4710 case 3: /* fsincos */
4711 gen_op_fsincos();
4712 break;
4713 case 5: /* fscale */
4714 gen_op_fscale();
4715 break;
4716 case 4: /* frndint */
4717 gen_op_frndint();
4718 break;
4719 case 6: /* fsin */
4720 gen_op_fsin();
4721 break;
4722 default:
4723 case 7: /* fcos */
4724 gen_op_fcos();
4725 break;
4726 }
4727 break;
4728 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4729 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4730 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4731 {
4732 int op1;
4733
4734 op1 = op & 7;
4735 if (op >= 0x20) {
4736 gen_op_fp_arith_STN_ST0[op1](opreg);
4737 if (op >= 0x30)
4738 gen_op_fpop();
4739 } else {
4740 gen_op_fmov_FT0_STN(opreg);
4741 gen_op_fp_arith_ST0_FT0[op1]();
4742 }
4743 }
4744 break;
4745 case 0x02: /* fcom */
4746 case 0x22: /* fcom2, undocumented op */
4747 gen_op_fmov_FT0_STN(opreg);
4748 gen_op_fcom_ST0_FT0();
4749 break;
4750 case 0x03: /* fcomp */
4751 case 0x23: /* fcomp3, undocumented op */
4752 case 0x32: /* fcomp5, undocumented op */
4753 gen_op_fmov_FT0_STN(opreg);
4754 gen_op_fcom_ST0_FT0();
4755 gen_op_fpop();
4756 break;
4757 case 0x15: /* da/5 */
4758 switch(rm) {
4759 case 1: /* fucompp */
4760 gen_op_fmov_FT0_STN(1);
4761 gen_op_fucom_ST0_FT0();
4762 gen_op_fpop();
4763 gen_op_fpop();
4764 break;
4765 default:
4766 goto illegal_op;
4767 }
4768 break;
4769 case 0x1c:
4770 switch(rm) {
4771 case 0: /* feni (287 only, just do nop here) */
4772 break;
4773 case 1: /* fdisi (287 only, just do nop here) */
4774 break;
4775 case 2: /* fclex */
4776 gen_op_fclex();
4777 break;
4778 case 3: /* fninit */
4779 gen_op_fninit();
4780 break;
4781 case 4: /* fsetpm (287 only, just do nop here) */
4782 break;
4783 default:
4784 goto illegal_op;
4785 }
4786 break;
4787 case 0x1d: /* fucomi */
4788 if (s->cc_op != CC_OP_DYNAMIC)
4789 gen_op_set_cc_op(s->cc_op);
4790 gen_op_fmov_FT0_STN(opreg);
4791 gen_op_fucomi_ST0_FT0();
4792 s->cc_op = CC_OP_EFLAGS;
4793 break;
4794 case 0x1e: /* fcomi */
4795 if (s->cc_op != CC_OP_DYNAMIC)
4796 gen_op_set_cc_op(s->cc_op);
4797 gen_op_fmov_FT0_STN(opreg);
4798 gen_op_fcomi_ST0_FT0();
4799 s->cc_op = CC_OP_EFLAGS;
4800 break;
4801 case 0x28: /* ffree sti */
4802 gen_op_ffree_STN(opreg);
4803 break;
4804 case 0x2a: /* fst sti */
4805 gen_op_fmov_STN_ST0(opreg);
4806 break;
4807 case 0x2b: /* fstp sti */
4808 case 0x0b: /* fstp1 sti, undocumented op */
4809 case 0x3a: /* fstp8 sti, undocumented op */
4810 case 0x3b: /* fstp9 sti, undocumented op */
4811 gen_op_fmov_STN_ST0(opreg);
4812 gen_op_fpop();
4813 break;
4814 case 0x2c: /* fucom st(i) */
4815 gen_op_fmov_FT0_STN(opreg);
4816 gen_op_fucom_ST0_FT0();
4817 break;
4818 case 0x2d: /* fucomp st(i) */
4819 gen_op_fmov_FT0_STN(opreg);
4820 gen_op_fucom_ST0_FT0();
4821 gen_op_fpop();
4822 break;
4823 case 0x33: /* de/3 */
4824 switch(rm) {
4825 case 1: /* fcompp */
4826 gen_op_fmov_FT0_STN(1);
4827 gen_op_fcom_ST0_FT0();
4828 gen_op_fpop();
4829 gen_op_fpop();
4830 break;
4831 default:
4832 goto illegal_op;
4833 }
4834 break;
4835 case 0x38: /* ffreep sti, undocumented op */
4836 gen_op_ffree_STN(opreg);
4837 gen_op_fpop();
4838 break;
4839 case 0x3c: /* df/4 */
4840 switch(rm) {
4841 case 0:
4842 gen_op_fnstsw_EAX();
4843 break;
4844 default:
4845 goto illegal_op;
4846 }
4847 break;
4848 case 0x3d: /* fucomip */
4849 if (s->cc_op != CC_OP_DYNAMIC)
4850 gen_op_set_cc_op(s->cc_op);
4851 gen_op_fmov_FT0_STN(opreg);
4852 gen_op_fucomi_ST0_FT0();
4853 gen_op_fpop();
4854 s->cc_op = CC_OP_EFLAGS;
4855 break;
4856 case 0x3e: /* fcomip */
4857 if (s->cc_op != CC_OP_DYNAMIC)
4858 gen_op_set_cc_op(s->cc_op);
4859 gen_op_fmov_FT0_STN(opreg);
4860 gen_op_fcomi_ST0_FT0();
4861 gen_op_fpop();
4862 s->cc_op = CC_OP_EFLAGS;
4863 break;
4864 case 0x10 ... 0x13: /* fcmovxx */
4865 case 0x18 ... 0x1b:
4866 {
4867 int op1;
4868 const static uint8_t fcmov_cc[8] = {
4869 (JCC_B << 1),
4870 (JCC_Z << 1),
4871 (JCC_BE << 1),
4872 (JCC_P << 1),
4873 };
4874 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4875 gen_setcc(s, op1);
4876 gen_op_fcmov_ST0_STN_T0(opreg);
4877 }
4878 break;
4879 default:
4880 goto illegal_op;
4881 }
4882 }
4883 break;
4884 /************************/
4885 /* string ops */
4886
/* String instructions.  In each case, opcode bit 0 selects byte vs.
   word/long/quad operand size (via dflag); a REP/REPNZ prefix routes
   to the gen_repz_* helper, which needs the instruction's start and
   end EIP to restart or fall through correctly. */
4887 case 0xa4: /* movsS */
4888 case 0xa5:
4889 if ((b & 1) == 0)
4890 ot = OT_BYTE;
4891 else
4892 ot = dflag + OT_WORD;
4893
4894 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4895 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4896 } else {
4897 gen_movs(s, ot);
4898 }
4899 break;
4900
4901 case 0xaa: /* stosS */
4902 case 0xab:
4903 if ((b & 1) == 0)
4904 ot = OT_BYTE;
4905 else
4906 ot = dflag + OT_WORD;
4907
4908 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4909 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4910 } else {
4911 gen_stos(s, ot);
4912 }
4913 break;
4914 case 0xac: /* lodsS */
4915 case 0xad:
4916 if ((b & 1) == 0)
4917 ot = OT_BYTE;
4918 else
4919 ot = dflag + OT_WORD;
4920 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4921 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4922 } else {
4923 gen_lods(s, ot);
4924 }
4925 break;
/* scas/cmps set flags, so REPZ vs. REPNZ matters: the last argument
   to gen_repz_* selects which ZF polarity terminates the loop. */
4926 case 0xae: /* scasS */
4927 case 0xaf:
4928 if ((b & 1) == 0)
4929 ot = OT_BYTE;
4930 else
4931 ot = dflag + OT_WORD;
4932 if (prefixes & PREFIX_REPNZ) {
4933 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4934 } else if (prefixes & PREFIX_REPZ) {
4935 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4936 } else {
/* non-REP form: record that flags now come from a SUB of size ot */
4937 gen_scas(s, ot);
4938 s->cc_op = CC_OP_SUBB + ot;
4939 }
4940 break;
4941
4942 case 0xa6: /* cmpsS */
4943 case 0xa7:
4944 if ((b & 1) == 0)
4945 ot = OT_BYTE;
4946 else
4947 ot = dflag + OT_WORD;
4948 if (prefixes & PREFIX_REPNZ) {
4949 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4950 } else if (prefixes & PREFIX_REPZ) {
4951 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4952 } else {
4953 gen_cmps(s, ot);
4954 s->cc_op = CC_OP_SUBB + ot;
4955 }
4956 break;
/* ins/outs: port I/O string forms.  Check IOPL/TSS permission first,
   load the port from DX (16-bit mask), then the SVM IOIO intercept
   (string + rep + address-size bits encoded into the exit info). */
4957 case 0x6c: /* insS */
4958 case 0x6d:
4959 if ((b & 1) == 0)
4960 ot = OT_BYTE;
4961 else
4962 ot = dflag ? OT_LONG : OT_WORD;
4963 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4964 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4965 gen_op_andl_T0_ffff();
4966 if (gen_svm_check_io(s, pc_start,
4967 SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4968 svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4969 break;
4970 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4971 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4972 } else {
4973 gen_ins(s, ot);
4974 }
4975 break;
4976 case 0x6e: /* outsS */
4977 case 0x6f:
4978 if ((b & 1) == 0)
4979 ot = OT_BYTE;
4980 else
4981 ot = dflag ? OT_LONG : OT_WORD;
4982 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4983 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4984 gen_op_andl_T0_ffff();
4985 if (gen_svm_check_io(s, pc_start,
4986 (1 << (4+ot)) | svm_is_rep(prefixes) |
4987 4 | (1 << (7+s->aflag))))
4988 break;
4989 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4990 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4991 } else {
4992 gen_outs(s, ot);
4993 }
4994 break;
4995
4996 /************************/
4997 /* port I/O */
4998
/* in AL/eAX, imm8: port number is an immediate byte.  gen_check_io
   raises #GP at translation-generated runtime if IOPL/TSS bitmap
   denies access; gen_svm_check_io exits to the hypervisor when the
   guest's IOIO intercept bitmap matches. */
4999 case 0xe4:
5000 case 0xe5:
5001 if ((b & 1) == 0)
5002 ot = OT_BYTE;
5003 else
5004 ot = dflag ? OT_LONG : OT_WORD;
5005 val = ldub_code(s->pc++);
5006 gen_op_movl_T0_im(val);
5007 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5008 if (gen_svm_check_io(s, pc_start,
5009 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5010 (1 << (4+ot))))
5011 break;
/* port in T0, result arrives in T1 and is written back to EAX */
5012 gen_op_in[ot]();
5013 gen_op_mov_reg_T1(ot, R_EAX);
5014 break;
/* out imm8, AL/eAX */
5015 case 0xe6:
5016 case 0xe7:
5017 if ((b & 1) == 0)
5018 ot = OT_BYTE;
5019 else
5020 ot = dflag ? OT_LONG : OT_WORD;
5021 val = ldub_code(s->pc++);
5022 gen_op_movl_T0_im(val);
5023 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5024 if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5025 (1 << (4+ot))))
5026 break;
5027 gen_op_mov_TN_reg(ot, 1, R_EAX);
5028 gen_op_out[ot]();
5029 break;
/* in AL/eAX, DX: port comes from DX, masked to 16 bits */
5030 case 0xec:
5031 case 0xed:
5032 if ((b & 1) == 0)
5033 ot = OT_BYTE;
5034 else
5035 ot = dflag ? OT_LONG : OT_WORD;
5036 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5037 gen_op_andl_T0_ffff();
5038 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5039 if (gen_svm_check_io(s, pc_start,
5040 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5041 (1 << (4+ot))))
5042 break;
5043 gen_op_in[ot]();
5044 gen_op_mov_reg_T1(ot, R_EAX);
5045 break;
/* out DX, AL/eAX */
5046 case 0xee:
5047 case 0xef:
5048 if ((b & 1) == 0)
5049 ot = OT_BYTE;
5050 else
5051 ot = dflag ? OT_LONG : OT_WORD;
5052 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5053 gen_op_andl_T0_ffff();
5054 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5055 if (gen_svm_check_io(s, pc_start,
5056 svm_is_rep(prefixes) | (1 << (4+ot))))
5057 break;
5058 gen_op_mov_TN_reg(ot, 1, R_EAX);
5059 gen_op_out[ot]();
5060 break;
5061
5062 /************************/
5063 /* control */
/* near ret imm16: pop EIP, then release imm16 extra stack bytes.
   Every indirect-EIP change ends the translation block (gen_eob). */
5064 case 0xc2: /* ret im */
5065 val = ldsw_code(s->pc);
5066 s->pc += 2;
5067 gen_pop_T0(s);
/* in 64-bit mode a 32-bit operand size is promoted to 64 bits for
   the stack adjustment below */
5068 if (CODE64(s) && s->dflag)
5069 s->dflag = 2;
5070 gen_stack_update(s, val + (2 << s->dflag));
5071 if (s->dflag == 0)
5072 gen_op_andl_T0_ffff();
5073 gen_op_jmp_T0();
5074 gen_eob(s);
5075 break;
5076 case 0xc3: /* ret */
5077 gen_pop_T0(s);
5078 gen_pop_update(s);
5079 if (s->dflag == 0)
5080 gen_op_andl_T0_ffff();
5081 gen_op_jmp_T0();
5082 gen_eob(s);
5083 break;
/* far ret: protected mode goes through the full helper (privilege
   and selector checks); real/vm86 mode pops CS:EIP inline. */
5084 case 0xca: /* lret im */
5085 val = ldsw_code(s->pc);
5086 s->pc += 2;
5087 do_lret:
5088 if (s->pe && !s->vm86) {
5089 if (s->cc_op != CC_OP_DYNAMIC)
5090 gen_op_set_cc_op(s->cc_op);
5091 gen_jmp_im(pc_start - s->cs_base);
5092 gen_op_lret_protected(s->dflag, val);
5093 } else {
5094 gen_stack_A0(s);
5095 /* pop offset */
5096 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5097 if (s->dflag == 0)
5098 gen_op_andl_T0_ffff();
5099 /* NOTE: keeping EIP updated is not a problem in case of
5100 exception */
5101 gen_op_jmp_T0();
5102 /* pop selector */
5103 gen_op_addl_A0_im(2 << s->dflag);
5104 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5105 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5106 /* add stack offset */
5107 gen_stack_update(s, val + (4 << s->dflag));
5108 }
5109 gen_eob(s);
5110 break;
5111 case 0xcb: /* lret */
5112 val = 0;
5113 goto do_lret;
/* iret: three flavors — real mode, vm86 (needs IOPL 3), and the
   protected-mode helper.  All make EFLAGS directly valid. */
5114 case 0xcf: /* iret */
5115 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5116 break;
5117 if (!s->pe) {
5118 /* real mode */
5119 gen_op_iret_real(s->dflag);
5120 s->cc_op = CC_OP_EFLAGS;
5121 } else if (s->vm86) {
5122 if (s->iopl != 3) {
5123 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5124 } else {
5125 gen_op_iret_real(s->dflag);
5126 s->cc_op = CC_OP_EFLAGS;
5127 }
5128 } else {
5129 if (s->cc_op != CC_OP_DYNAMIC)
5130 gen_op_set_cc_op(s->cc_op);
5131 gen_jmp_im(pc_start - s->cs_base);
5132 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5133 s->cc_op = CC_OP_EFLAGS;
5134 }
5135 gen_eob(s);
5136 break;
/* Relative call: sign-extended displacement, target truncated to
   16 bits when operand size is 16.  Pushes the return EIP first. */
5137 case 0xe8: /* call im */
5138 {
5139 if (dflag)
5140 tval = (int32_t)insn_get(s, OT_LONG);
5141 else
5142 tval = (int16_t)insn_get(s, OT_WORD);
5143 next_eip = s->pc - s->cs_base;
5144 tval += next_eip;
5145 if (s->dflag == 0)
5146 tval &= 0xffff;
5147 gen_movtl_T0_im(next_eip);
5148 gen_push_T0(s);
5149 gen_jmp(s, tval);
5150 }
5151 break;
/* far call with immediate selector:offset; invalid in 64-bit mode.
   Falls into the shared do_lcall path defined elsewhere. */
5152 case 0x9a: /* lcall im */
5153 {
5154 unsigned int selector, offset;
5155
5156 if (CODE64(s))
5157 goto illegal_op;
5158 ot = dflag ? OT_LONG : OT_WORD;
5159 offset = insn_get(s, ot);
5160 selector = insn_get(s, OT_WORD);
5161
5162 gen_op_movl_T0_im(selector);
5163 gen_op_movl_T1_imu(offset);
5164 }
5165 goto do_lcall;
5166 case 0xe9: /* jmp im */
5167 if (dflag)
5168 tval = (int32_t)insn_get(s, OT_LONG);
5169 else
5170 tval = (int16_t)insn_get(s, OT_WORD);
5171 tval += s->pc - s->cs_base;
5172 if (s->dflag == 0)
5173 tval &= 0xffff;
5174 gen_jmp(s, tval);
5175 break;
5176 case 0xea: /* ljmp im */
5177 {
5178 unsigned int selector, offset;
5179
5180 if (CODE64(s))
5181 goto illegal_op;
5182 ot = dflag ? OT_LONG : OT_WORD;
5183 offset = insn_get(s, ot);
5184 selector = insn_get(s, OT_WORD);
5185
5186 gen_op_movl_T0_im(selector);
5187 gen_op_movl_T1_imu(offset);
5188 }
5189 goto do_ljmp;
5190 case 0xeb: /* jmp Jb */
5191 tval = (int8_t)insn_get(s, OT_BYTE);
5192 tval += s->pc - s->cs_base;
5193 if (s->dflag == 0)
5194 tval &= 0xffff;
5195 gen_jmp(s, tval);
5196 break;
/* conditional jumps: short (Jb) and near (Jv) forms share do_jcc;
   the condition index comes from the low bits of opcode b. */
5197 case 0x70 ... 0x7f: /* jcc Jb */
5198 tval = (int8_t)insn_get(s, OT_BYTE);
5199 goto do_jcc;
5200 case 0x180 ... 0x18f: /* jcc Jv */
5201 if (dflag) {
5202 tval = (int32_t)insn_get(s, OT_LONG);
5203 } else {
5204 tval = (int16_t)insn_get(s, OT_WORD);
5205 }
5206 do_jcc:
5207 next_eip = s->pc - s->cs_base;
5208 tval += next_eip;
5209 if (s->dflag == 0)
5210 tval &= 0xffff;
5211 gen_jcc(s, b, tval, next_eip);
5212 break;
5213
/* setcc: evaluate condition into T0 (0/1), store byte via modrm */
5214 case 0x190 ... 0x19f: /* setcc Gv */
5215 modrm = ldub_code(s->pc++);
5216 gen_setcc(s, b);
5217 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5218 break;
/* cmov: condition in T0, source value in T1, conditional register
   write dispatched through the per-size/per-register op table */
5219 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5220 ot = dflag + OT_WORD;
5221 modrm = ldub_code(s->pc++);
5222 reg = ((modrm >> 3) & 7) | rex_r;
5223 mod = (modrm >> 6) & 3;
5224 gen_setcc(s, b);
5225 if (mod != 3) {
5226 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5227 gen_op_ld_T1_A0(ot + s->mem_index);
5228 } else {
5229 rm = (modrm & 7) | REX_B(s);
5230 gen_op_mov_TN_reg(ot, 1, rm);
5231 }
5232 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5233 break;
5234
5235 /************************/
5236 /* flags */
/* pushf: vm86 with IOPL != 3 faults; otherwise materialize lazy
   flags into EFLAGS and push. */
5237 case 0x9c: /* pushf */
5238 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5239 break;
5240 if (s->vm86 && s->iopl != 3) {
5241 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5242 } else {
5243 if (s->cc_op != CC_OP_DYNAMIC)
5244 gen_op_set_cc_op(s->cc_op);
5245 gen_op_movl_T0_eflags();
5246 gen_push_T0(s);
5247 }
5248 break;
/* popf: which EFLAGS bits are writable depends on CPL/IOPL, hence
   the three helper variants (cpl0 / io-privileged / unprivileged),
   each in 32- and 16-bit operand flavors. */
5249 case 0x9d: /* popf */
5250 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5251 break;
5252 if (s->vm86 && s->iopl != 3) {
5253 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5254 } else {
5255 gen_pop_T0(s);
5256 if (s->cpl == 0) {
5257 if (s->dflag) {
5258 gen_op_movl_eflags_T0_cpl0();
5259 } else {
5260 gen_op_movw_eflags_T0_cpl0();
5261 }
5262 } else {
5263 if (s->cpl <= s->iopl) {
5264 if (s->dflag) {
5265 gen_op_movl_eflags_T0_io();
5266 } else {
5267 gen_op_movw_eflags_T0_io();
5268 }
5269 } else {
5270 if (s->dflag) {
5271 gen_op_movl_eflags_T0();
5272 } else {
5273 gen_op_movw_eflags_T0();
5274 }
5275 }
5276 }
5277 gen_pop_update(s);
5278 s->cc_op = CC_OP_EFLAGS;
5279 /* abort translation because TF flag may change */
5280 gen_jmp_im(s->pc - s->cs_base);
5281 gen_eob(s);
5282 }
5283 break;
/* sahf/lahf: AH <-> low EFLAGS byte; both encodings are illegal in
   64-bit mode on the CPUs this models. */
5284 case 0x9e: /* sahf */
5285 if (CODE64(s))
5286 goto illegal_op;
5287 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5288 if (s->cc_op != CC_OP_DYNAMIC)
5289 gen_op_set_cc_op(s->cc_op);
5290 gen_op_movb_eflags_T0();
5291 s->cc_op = CC_OP_EFLAGS;
5292 break;
5293 case 0x9f: /* lahf */
5294 if (CODE64(s))
5295 goto illegal_op;
5296 if (s->cc_op != CC_OP_DYNAMIC)
5297 gen_op_set_cc_op(s->cc_op);
5298 gen_op_movl_T0_eflags();
5299 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5300 break;
/* CF manipulation: flags must be concrete first, and afterwards
   EFLAGS is the authoritative source (CC_OP_EFLAGS). */
5301 case 0xf5: /* cmc */
5302 if (s->cc_op != CC_OP_DYNAMIC)
5303 gen_op_set_cc_op(s->cc_op);
5304 gen_op_cmc();
5305 s->cc_op = CC_OP_EFLAGS;
5306 break;
5307 case 0xf8: /* clc */
5308 if (s->cc_op != CC_OP_DYNAMIC)
5309 gen_op_set_cc_op(s->cc_op);
5310 gen_op_clc();
5311 s->cc_op = CC_OP_EFLAGS;
5312 break;
5313 case 0xf9: /* stc */
5314 if (s->cc_op != CC_OP_DYNAMIC)
5315 gen_op_set_cc_op(s->cc_op);
5316 gen_op_stc();
5317 s->cc_op = CC_OP_EFLAGS;
5318 break;
/* DF is tracked separately from the lazy cc state, so cld/std need
   no cc_op synchronization */
5319 case 0xfc: /* cld */
5320 gen_op_cld();
5321 break;
5322 case 0xfd: /* std */
5323 gen_op_std();
5324 break;
5325
5326 /************************/
5327 /* bit operations */
/* bt/bts/btr/btc with immediate bit index (group 8).  rip_offset
   accounts for the trailing imm8 when computing RIP-relative
   addresses.  Only op values 4-7 are valid here. */
5328 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5329 ot = dflag + OT_WORD;
5330 modrm = ldub_code(s->pc++);
5331 op = (modrm >> 3) & 7;
5332 mod = (modrm >> 6) & 3;
5333 rm = (modrm & 7) | REX_B(s);
5334 if (mod != 3) {
5335 s->rip_offset = 1;
5336 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5337 gen_op_ld_T0_A0(ot + s->mem_index);
5338 } else {
5339 gen_op_mov_TN_reg(ot, 0, rm);
5340 }
5341 /* load shift */
5342 val = ldub_code(s->pc++);
5343 gen_op_movl_T1_im(val);
5344 if (op < 4)
5345 goto illegal_op;
5346 op -= 4;
/* op 0..3 now selects bt/bts/btr/btc in the dispatch table */
5347 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5348 s->cc_op = CC_OP_SARB + ot;
/* bt (op 0) only reads; the writing variants store the result and
   refresh the saved CF */
5349 if (op != 0) {
5350 if (mod != 3)
5351 gen_op_st_T0_A0(ot + s->mem_index);
5352 else
5353 gen_op_mov_reg_T0(ot, rm);
5354 gen_op_update_bt_cc();
5355 }
5356 break;
/* register-indexed forms: the four opcodes set op then share
   do_btx below */
5357 case 0x1a3: /* bt Gv, Ev */
5358 op = 0;
5359 goto do_btx;
5360 case 0x1ab: /* bts */
5361 op = 1;
5362 goto do_btx;
5363 case 0x1b3: /* btr */
5364 op = 2;
5365 goto do_btx;
5366 case 0x1bb: /* btc */
5367 op = 3;
5368 do_btx:
5369 ot = dflag + OT_WORD;
5370 modrm = ldub_code(s->pc++);
5371 reg = ((modrm >> 3) & 7) | rex_r;
5372 mod = (modrm >> 6) & 3;
5373 rm = (modrm & 7) | REX_B(s);
5374 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5375 if (mod != 3) {
5376 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5377 /* specific case: we need to add a displacement */
5378 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5379 gen_op_ld_T0_A0(ot + s->mem_index);
5380 } else {
5381 gen_op_mov_TN_reg(ot, 0, rm);
5382 }
5383 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5384 s->cc_op = CC_OP_SARB + ot;
5385 if (op != 0) {
5386 if (mod != 3)
5387 gen_op_st_T0_A0(ot + s->mem_index);
5388 else
5389 gen_op_mov_reg_T0(ot, rm);
5390 gen_op_update_bt_cc();
5391 }
5392 break;
5393 case 0x1bc: /* bsf */
5394 case 0x1bd: /* bsr */
5395 ot = dflag + OT_WORD;
5396 modrm = ldub_code(s->pc++);
5397 reg = ((modrm >> 3) & 7) | rex_r;
5398 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5399 /* NOTE: in order to handle the 0 case, we must load the
5400 result. It could be optimized with a generated jump */
5401 gen_op_mov_TN_reg(ot, 1, reg);
5402 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5403 gen_op_mov_reg_T1(ot, reg);
5404 s->cc_op = CC_OP_LOGICB + ot;
5405 break;
5406 /************************/
5407 /* bcd */
/* BCD adjust instructions.  All are illegal in 64-bit mode; all
   read the current flags, so the lazy cc state is flushed first. */
5408 case 0x27: /* daa */
5409 if (CODE64(s))
5410 goto illegal_op;
5411 if (s->cc_op != CC_OP_DYNAMIC)
5412 gen_op_set_cc_op(s->cc_op);
5413 gen_op_daa();
5414 s->cc_op = CC_OP_EFLAGS;
5415 break;
5416 case 0x2f: /* das */
5417 if (CODE64(s))
5418 goto illegal_op;
5419 if (s->cc_op != CC_OP_DYNAMIC)
5420 gen_op_set_cc_op(s->cc_op);
5421 gen_op_das();
5422 s->cc_op = CC_OP_EFLAGS;
5423 break;
5424 case 0x37: /* aaa */
5425 if (CODE64(s))
5426 goto illegal_op;
5427 if (s->cc_op != CC_OP_DYNAMIC)
5428 gen_op_set_cc_op(s->cc_op);
5429 gen_op_aaa();
5430 s->cc_op = CC_OP_EFLAGS;
5431 break;
5432 case 0x3f: /* aas */
5433 if (CODE64(s))
5434 goto illegal_op;
5435 if (s->cc_op != CC_OP_DYNAMIC)
5436 gen_op_set_cc_op(s->cc_op);
5437 gen_op_aas();
5438 s->cc_op = CC_OP_EFLAGS;
5439 break;
/* aam divides AL by the immediate base: divisor 0 raises #DE at
   translation time rather than in the helper */
5440 case 0xd4: /* aam */
5441 if (CODE64(s))
5442 goto illegal_op;
5443 val = ldub_code(s->pc++);
5444 if (val == 0) {
5445 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5446 } else {
5447 gen_op_aam(val);
5448 s->cc_op = CC_OP_LOGICB;
5449 }
5450 break;
5451 case 0xd5: /* aad */
5452 if (CODE64(s))
5453 goto illegal_op;
5454 val = ldub_code(s->pc++);
5455 gen_op_aad(val);
5456 s->cc_op = CC_OP_LOGICB;
5457 break;
5458 /************************/
5459 /* misc */
/* 0x90 is nop; with REPZ (F3 90) it is pause, which only matters
   for the SVM PAUSE intercept here. */
5460 case 0x90: /* nop */
5461 /* XXX: xchg + rex handling */
5462 /* XXX: correct lock test for all insn */
5463 if (prefixes & PREFIX_LOCK)
5464 goto illegal_op;
5465 if (prefixes & PREFIX_REPZ) {
5466 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5467 }
5468 break;
/* fwait: #NM if both CR0.MP and CR0.TS are set, else run the FPU
   wait helper with flags and EIP synchronized */
5469 case 0x9b: /* fwait */
5470 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5471 (HF_MP_MASK | HF_TS_MASK)) {
5472 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5473 } else {
5474 if (s->cc_op != CC_OP_DYNAMIC)
5475 gen_op_set_cc_op(s->cc_op);
5476 gen_jmp_im(pc_start - s->cs_base);
5477 gen_op_fwait();
5478 }
5479 break;
5480 case 0xcc: /* int3 */
5481 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5482 break;
5483 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5484 break;
/* int N: vm86 with IOPL != 3 faults instead of delivering the
   software interrupt */
5485 case 0xcd: /* int N */
5486 val = ldub_code(s->pc++);
5487 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5488 break;
5489 if (s->vm86 && s->iopl != 3) {
5490 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5491 } else {
5492 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5493 }
5494 break;
5495 case 0xce: /* into */
5496 if (CODE64(s))
5497 goto illegal_op;
5498 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5499 break;
5500 if (s->cc_op != CC_OP_DYNAMIC)
5501 gen_op_set_cc_op(s->cc_op);
5502 gen_jmp_im(pc_start - s->cs_base);
5503 gen_op_into(s->pc - pc_start);
5504 break;
5505 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5506 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5507 break;
5508 #if 1
5509 gen_debug(s, pc_start - s->cs_base);
5510 #else
5511 /* start debug */
5512 tb_flush(cpu_single_env);
5513 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5514 #endif
5515 break;
/* cli/sti privilege rule: outside vm86, CPL <= IOPL is required;
   inside vm86, IOPL must be 3. */
5516 case 0xfa: /* cli */
5517 if (!s->vm86) {
5518 if (s->cpl <= s->iopl) {
5519 gen_op_cli();
5520 } else {
5521 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5522 }
5523 } else {
5524 if (s->iopl == 3) {
5525 gen_op_cli();
5526 } else {
5527 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5528 }
5529 }
5530 break;
5531 case 0xfb: /* sti */
5532 if (!s->vm86) {
5533 if (s->cpl <= s->iopl) {
5534 gen_sti:
5535 gen_op_sti();
5536 /* interruptions are enabled only the first insn after sti */
5537 /* If several instructions disable interrupts, only the
5538 _first_ does it */
5539 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5540 gen_op_set_inhibit_irq();
5541 /* give a chance to handle pending irqs */
5542 gen_jmp_im(s->pc - s->cs_base);
5543 gen_eob(s);
5544 } else {
5545 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5546 }
5547 } else {
5548 if (s->iopl == 3) {
5549 goto gen_sti;
5550 } else {
5551 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5552 }
5553 }
5554 break;
/* bound: memory-only operand (mod == 3 illegal); helper raises #BR
   if the index is outside the bounds pair */
5555 case 0x62: /* bound */
5556 if (CODE64(s))
5557 goto illegal_op;
5558 ot = dflag ? OT_LONG : OT_WORD;
5559 modrm = ldub_code(s->pc++);
5560 reg = (modrm >> 3) & 7;
5561 mod = (modrm >> 6) & 3;
5562 if (mod == 3)
5563 goto illegal_op;
5564 gen_op_mov_TN_reg(ot, 0, reg);
5565 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5566 gen_jmp_im(pc_start - s->cs_base);
5567 if (ot == OT_WORD)
5568 gen_op_boundw();
5569 else
5570 gen_op_boundl();
5571 break;
/* bswap: 64-bit form swaps the whole register; 32-bit form works
   on a truncated temporary then zero-extends back */
5572 case 0x1c8 ... 0x1cf: /* bswap reg */
5573 reg = (b & 7) | REX_B(s);
5574 #ifdef TARGET_X86_64
5575 if (dflag == 2) {
5576 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5577 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5578 gen_op_mov_reg_T0(OT_QUAD, reg);
5579 } else
5580 {
5581 TCGv tmp0;
5582 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5583
5584 tmp0 = tcg_temp_new(TCG_TYPE_I32);
5585 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5586 tcg_gen_bswap_i32(tmp0, tmp0);
5587 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5588 gen_op_mov_reg_T0(OT_LONG, reg);
5589 }
5590 #else
5591 {
5592 gen_op_mov_TN_reg(OT_LONG, 0, reg);
5593 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5594 gen_op_mov_reg_T0(OT_LONG, reg);
5595 }
5596 #endif
5597 break;
/* salc (undocumented): AL = CF ? 0xff : 0x00; needs concrete flags */
5598 case 0xd6: /* salc */
5599 if (CODE64(s))
5600 goto illegal_op;
5601 if (s->cc_op != CC_OP_DYNAMIC)
5602 gen_op_set_cc_op(s->cc_op);
5603 gen_op_salc();
5604 break;
/* loopnz/loopz need the flags, so flush lazy cc state, then fall
   through to the shared loop/jecxz lowering. */
5605 case 0xe0: /* loopnz */
5606 case 0xe1: /* loopz */
5607 if (s->cc_op != CC_OP_DYNAMIC)
5608 gen_op_set_cc_op(s->cc_op);
5609 /* FALL THRU */
5610 case 0xe2: /* loop */
5611 case 0xe3: /* jecxz */
5612 {
5613 int l1, l2;
5614
5615 tval = (int8_t)insn_get(s, OT_BYTE);
5616 next_eip = s->pc - s->cs_base;
5617 tval += next_eip;
5618 if (s->dflag == 0)
5619 tval &= 0xffff;
5620
/* l1 = branch taken (jump to tval), l2 = join point after the
   not-taken path has set EIP to next_eip */
5621 l1 = gen_new_label();
5622 l2 = gen_new_label();
5623 b &= 3;
5624 if (b == 3) {
/* jecxz: taken when (E)CX is zero; aflag picks CX/ECX/RCX */
5625 gen_op_jz_ecx[s->aflag](l1);
5626 } else {
/* loop variants: decrement the count register first; loopz and
   loopnz (b <= 1) additionally consult ZF via T0 */
5627 gen_op_dec_ECX[s->aflag]();
5628 if (b <= 1)
5629 gen_op_mov_T0_cc();
5630 gen_op_loop[s->aflag][b](l1);
5631 }
5632
5633 gen_jmp_im(next_eip);
5634 gen_op_jmp_label(l2);
5635 gen_set_label(l1);
5636 gen_jmp_im(tval);
5637 gen_set_label(l2);
5638 gen_eob(s);
5639 }
5640 break;
/* rdmsr/wrmsr: CPL 0 only.  The SVM intercept check distinguishes
   read (param 0) from write (param 1); a taken intercept ends the
   translation block. */
5641 case 0x130: /* wrmsr */
5642 case 0x132: /* rdmsr */
5643 if (s->cpl != 0) {
5644 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5645 } else {
5646 int retval = 0;
5647 if (b & 2) {
5648 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5649 gen_op_rdmsr();
5650 } else {
5651 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5652 gen_op_wrmsr();
5653 }
5654 if(retval)
5655 gen_eob(s);
5656 }
5657 break;
5658 case 0x131: /* rdtsc */
5659 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5660 break;
5661 gen_jmp_im(pc_start - s->cs_base);
5662 gen_op_rdtsc();
5663 break;
5664 case 0x133: /* rdpmc */
5665 gen_jmp_im(pc_start - s->cs_base);
5666 gen_op_rdpmc();
5667 break;
/* sysenter/sysexit: protected mode only; helpers rewrite CS/SS/
   EIP/ESP, so cc state is forced dynamic and the TB is ended. */
5668 case 0x134: /* sysenter */
5669 if (CODE64(s))
5670 goto illegal_op;
5671 if (!s->pe) {
5672 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5673 } else {
5674 if (s->cc_op != CC_OP_DYNAMIC) {
5675 gen_op_set_cc_op(s->cc_op);
5676 s->cc_op = CC_OP_DYNAMIC;
5677 }
5678 gen_jmp_im(pc_start - s->cs_base);
5679 gen_op_sysenter();
5680 gen_eob(s);
5681 }
5682 break;
5683 case 0x135: /* sysexit */
5684 if (CODE64(s))
5685 goto illegal_op;
5686 if (!s->pe) {
5687 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5688 } else {
5689 if (s->cc_op != CC_OP_DYNAMIC) {
5690 gen_op_set_cc_op(s->cc_op);
5691 s->cc_op = CC_OP_DYNAMIC;
5692 }
5693 gen_jmp_im(pc_start - s->cs_base);
5694 gen_op_sysexit();
5695 gen_eob(s);
5696 }
5697 break;
5698 #ifdef TARGET_X86_64
5699 case 0x105: /* syscall */
5700 /* XXX: is it usable in real mode ? */
5701 if (s->cc_op != CC_OP_DYNAMIC) {
5702 gen_op_set_cc_op(s->cc_op);
5703 s->cc_op = CC_OP_DYNAMIC;
5704 }
5705 gen_jmp_im(pc_start - s->cs_base);
5706 gen_op_syscall(s->pc - pc_start);
5707 gen_eob(s);
5708 break;
5709 case 0x107: /* sysret */
5710 if (!s->pe) {
5711 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5712 } else {
5713 if (s->cc_op != CC_OP_DYNAMIC) {
5714 gen_op_set_cc_op(s->cc_op);
5715 s->cc_op = CC_OP_DYNAMIC;
5716 }
5717 gen_jmp_im(pc_start - s->cs_base);
5718 gen_op_sysret(s->dflag);
5719 /* condition codes are modified only in long mode */
5720 if (s->lma)
5721 s->cc_op = CC_OP_EFLAGS;
5722 gen_eob(s);
5723 }
5724 break;
5725 #endif
5726 case 0x1a2: /* cpuid */
5727 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5728 break;
5729 gen_op_cpuid();
5730 break;
/* hlt: CPL 0 only; EIP is advanced past the insn before halting so
   resumption continues after it.  is_jmp = 3 stops translation. */
5731 case 0xf4: /* hlt */
5732 if (s->cpl != 0) {
5733 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5734 } else {
5735 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5736 break;
5737 if (s->cc_op != CC_OP_DYNAMIC)
5738 gen_op_set_cc_op(s->cc_op);
5739 gen_jmp_im(s->pc - s->cs_base);
5740 gen_op_hlt();
5741 s->is_jmp = 3;
5742 }
5743 break;
/* Group 6 (0F 00): LDTR/TR access and segment verification.  All
   require protected mode and are illegal in vm86. */
5744 case 0x100:
5745 modrm = ldub_code(s->pc++);
5746 mod = (modrm >> 6) & 3;
5747 op = (modrm >> 3) & 7;
5748 switch(op) {
/* sldt: store LDTR selector; register destination widens with the
   operand size, memory destination is always 16-bit */
5749 case 0: /* sldt */
5750 if (!s->pe || s->vm86)
5751 goto illegal_op;
5752 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5753 break;
5754 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5755 ot = OT_WORD;
5756 if (mod == 3)
5757 ot += s->dflag;
5758 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5759 break;
/* lldt: CPL 0 only; helper validates the selector and may fault,
   hence the EIP synchronization first */
5760 case 2: /* lldt */
5761 if (!s->pe || s->vm86)
5762 goto illegal_op;
5763 if (s->cpl != 0) {
5764 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5765 } else {
5766 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5767 break;
5768 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5769 gen_jmp_im(pc_start - s->cs_base);
5770 gen_op_lldt_T0();
5771 }
5772 break;
5773 case 1: /* str */
5774 if (!s->pe || s->vm86)
5775 goto illegal_op;
5776 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5777 break;
5778 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5779 ot = OT_WORD;
5780 if (mod == 3)
5781 ot += s->dflag;
5782 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5783 break;
5784 case 3: /* ltr */
5785 if (!s->pe || s->vm86)
5786 goto illegal_op;
5787 if (s->cpl != 0) {
5788 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5789 } else {
5790 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5791 break;
5792 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5793 gen_jmp_im(pc_start - s->cs_base);
5794 gen_op_ltr_T0();
5795 }
5796 break;
/* verr/verw: helpers set ZF according to selector accessibility */
5797 case 4: /* verr */
5798 case 5: /* verw */
5799 if (!s->pe || s->vm86)
5800 goto illegal_op;
5801 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5802 if (s->cc_op != CC_OP_DYNAMIC)
5803 gen_op_set_cc_op(s->cc_op);
5804 if (op == 4)
5805 gen_op_verr();
5806 else
5807 gen_op_verw();
5808 s->cc_op = CC_OP_EFLAGS;
5809 break;
5810 default:
5811 goto illegal_op;
5812 }
5813 break;
/* Group 7 (0F 01): descriptor-table registers, monitor/mwait, the
   SVM instructions (encoded as mod == 3 variants of lgdt/lidt),
   smsw/lmsw, invlpg and swapgs. */
5814 case 0x101:
5815 modrm = ldub_code(s->pc++);
5816 mod = (modrm >> 6) & 3;
5817 op = (modrm >> 3) & 7;
5818 rm = modrm & 7;
5819 switch(op) {
/* sgdt: store 16-bit limit then the base; base is truncated to
   24 bits with a 16-bit operand size, stored 32/64-bit otherwise */
5820 case 0: /* sgdt */
5821 if (mod == 3)
5822 goto illegal_op;
5823 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5824 break;
5825 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5826 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5827 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5828 gen_add_A0_im(s, 2);
5829 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5830 if (!s->dflag)
5831 gen_op_andl_T0_im(0xffffff);
5832 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5833 break;
5834 case 1:
5835 if (mod == 3) {
5836 switch (rm) {
/* monitor: address in (R/E)AX (plus DS segment), AL added via the
   addq/addl_A0_AL helper; CPUID monitor feature and CPL 0 required */
5837 case 0: /* monitor */
5838 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5839 s->cpl != 0)
5840 goto illegal_op;
5841 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5842 break;
5843 gen_jmp_im(pc_start - s->cs_base);
5844 #ifdef TARGET_X86_64
5845 if (s->aflag == 2) {
5846 gen_op_movq_A0_reg(R_EBX);
5847 gen_op_addq_A0_AL();
5848 } else
5849 #endif
5850 {
5851 gen_op_movl_A0_reg(R_EBX);
5852 gen_op_addl_A0_AL();
5853 if (s->aflag == 0)
5854 gen_op_andl_A0_ffff();
5855 }
5856 gen_add_A0_ds_seg(s);
5857 gen_op_monitor();
5858 break;
/* mwait: may sleep, so EIP is set past the insn and the TB ends */
5859 case 1: /* mwait */
5860 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5861 s->cpl != 0)
5862 goto illegal_op;
5863 if (s->cc_op != CC_OP_DYNAMIC) {
5864 gen_op_set_cc_op(s->cc_op);
5865 s->cc_op = CC_OP_DYNAMIC;
5866 }
5867 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5868 break;
5869 gen_jmp_im(s->pc - s->cs_base);
5870 gen_op_mwait();
5871 gen_eob(s);
5872 break;
5873 default:
5874 goto illegal_op;
5875 }
5876 } else { /* sidt */
5877 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5878 break;
5879 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5880 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5881 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5882 gen_add_A0_im(s, 2);
5883 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5884 if (!s->dflag)
5885 gen_op_andl_T0_im(0xffffff);
5886 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5887 }
5888 break;
/* lgdt/lidt with mod == 3 encodes the AMD SVM instruction set;
   each variant checks its own intercept before the helper runs. */
5889 case 2: /* lgdt */
5890 case 3: /* lidt */
5891 if (mod == 3) {
5892 switch(rm) {
5893 case 0: /* VMRUN */
5894 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5895 break;
5896 if (s->cc_op != CC_OP_DYNAMIC)
5897 gen_op_set_cc_op(s->cc_op);
5898 gen_jmp_im(s->pc - s->cs_base);
5899 gen_op_vmrun();
5900 s->cc_op = CC_OP_EFLAGS;
5901 gen_eob(s);
5902 break;
5903 case 1: /* VMMCALL */
5904 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5905 break;
5906 /* FIXME: cause #UD if hflags & SVM */
5907 gen_op_vmmcall();
5908 break;
5909 case 2: /* VMLOAD */
5910 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5911 break;
5912 gen_op_vmload();
5913 break;
5914 case 3: /* VMSAVE */
5915 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5916 break;
5917 gen_op_vmsave();
5918 break;
5919 case 4: /* STGI */
5920 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5921 break;
5922 gen_op_stgi();
5923 break;
5924 case 5: /* CLGI */
5925 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5926 break;
5927 gen_op_clgi();
5928 break;
5929 case 6: /* SKINIT */
5930 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5931 break;
5932 gen_op_skinit();
5933 break;
5934 case 7: /* INVLPGA */
5935 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5936 break;
5937 gen_op_invlpga();
5938 break;
5939 default:
5940 goto illegal_op;
5941 }
5942 } else if (s->cpl != 0) {
5943 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5944 } else {
/* real lgdt/lidt: load limit (word) and base (long/quad), base
   masked to 24 bits with 16-bit operand size */
5945 if (gen_svm_check_intercept(s, pc_start,
5946 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5947 break;
5948 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5949 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
5950 gen_add_A0_im(s, 2);
5951 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5952 if (!s->dflag)
5953 gen_op_andl_T0_im(0xffffff);
5954 if (op == 2) {
5955 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5956 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5957 } else {
5958 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5959 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5960 }
5961 }
5962 break;
/* smsw: store low word of CR0; readable at any CPL */
5963 case 4: /* smsw */
5964 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5965 break;
5966 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5967 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5968 break;
/* lmsw: CPL 0; may toggle PE, so the TB must end afterwards */
5969 case 6: /* lmsw */
5970 if (s->cpl != 0) {
5971 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5972 } else {
5973 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5974 break;
5975 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5976 gen_op_lmsw_T0();
5977 gen_jmp_im(s->pc - s->cs_base);
5978 gen_eob(s);
5979 }
5980 break;
/* invlpg (memory form) or, in 64-bit mode with rm == 0, swapgs
   which exchanges GS.base with the kernelgsbase MSR value */
5981 case 7: /* invlpg */
5982 if (s->cpl != 0) {
5983 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5984 } else {
5985 if (mod == 3) {
5986 #ifdef TARGET_X86_64
5987 if (CODE64(s) && rm == 0) {
5988 /* swapgs */
5989 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5990 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5991 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5992 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5993 } else
5994 #endif
5995 {
5996 goto illegal_op;
5997 }
5998 } else {
5999 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6000 break;
6001 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6002 gen_op_invlpg_A0();
6003 gen_jmp_im(s->pc - s->cs_base);
6004 gen_eob(s);
6005 }
6006 }
6007 break;
6008 default:
6009 goto illegal_op;
6010 }
6011 break;
/* invd/wbinvd: privileged; cache model is a no-op here, only the
   privilege check and SVM intercept matter. */
6012 case 0x108: /* invd */
6013 case 0x109: /* wbinvd */
6014 if (s->cpl != 0) {
6015 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6016 } else {
6017 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6018 break;
6019 /* nothing to do */
6020 }
6021 break;
/* 0x63 is arpl in legacy/compat mode but movsxd (movslq) in
   64-bit mode — two unrelated instructions sharing an opcode. */
6022 case 0x63: /* arpl or movslS (x86_64) */
6023 #ifdef TARGET_X86_64
6024 if (CODE64(s)) {
6025 int d_ot;
6026 /* d_ot is the size of destination */
6027 d_ot = dflag + OT_WORD;
6028
6029 modrm = ldub_code(s->pc++);
6030 reg = ((modrm >> 3) & 7) | rex_r;
6031 mod = (modrm >> 6) & 3;
6032 rm = (modrm & 7) | REX_B(s);
6033
6034 if (mod == 3) {
6035 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6036 /* sign extend */
6037 if (d_ot == OT_QUAD)
6038 gen_op_movslq_T0_T0();
6039 gen_op_mov_reg_T0(d_ot, reg);
6040 } else {
6041 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6042 if (d_ot == OT_QUAD) {
6043 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6044 } else {
6045 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6046 }
6047 gen_op_mov_reg_T0(d_ot, reg);
6048 }
6049 } else
6050 #endif
6051 {
/* arpl: protected mode only; helper compares/adjusts the RPL
   bits and sets ZF, result written back to the Ev operand */
6052 if (!s->pe || s->vm86)
6053 goto illegal_op;
6054 ot = dflag ? OT_LONG : OT_WORD;
6055 modrm = ldub_code(s->pc++);
6056 reg = (modrm >> 3) & 7;
6057 mod = (modrm >> 6) & 3;
6058 rm = modrm & 7;
6059 if (mod != 3) {
6060 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6061 gen_op_ld_T0_A0(ot + s->mem_index);
6062 } else {
6063 gen_op_mov_TN_reg(ot, 0, rm);
6064 }
6065 if (s->cc_op != CC_OP_DYNAMIC)
6066 gen_op_set_cc_op(s->cc_op);
6067 gen_op_arpl();
6068 s->cc_op = CC_OP_EFLAGS;
6069 if (mod != 3) {
6070 gen_op_st_T0_A0(ot + s->mem_index);
6071 } else {
6072 gen_op_mov_reg_T0(ot, rm);
6073 }
6074 gen_op_arpl_update();
6075 }
6076 break;
/* lar/lsl: load access rights / segment limit for a selector;
   ZF signals validity, destination written unconditionally from T1 */
6077 case 0x102: /* lar */
6078 case 0x103: /* lsl */
6079 if (!s->pe || s->vm86)
6080 goto illegal_op;
6081 ot = dflag ? OT_LONG : OT_WORD;
6082 modrm = ldub_code(s->pc++);
6083 reg = ((modrm >> 3) & 7) | rex_r;
6084 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6085 gen_op_mov_TN_reg(ot, 1, reg);
6086 if (s->cc_op != CC_OP_DYNAMIC)
6087 gen_op_set_cc_op(s->cc_op);
6088 if (b == 0x102)
6089 gen_op_lar();
6090 else
6091 gen_op_lsl();
6092 s->cc_op = CC_OP_EFLAGS;
6093 gen_op_mov_reg_T1(ot, reg);
6094 break;
/* 0F 18: prefetch hints.  Only the effective address is computed
   (so faults on a bad modrm still occur); no data is loaded. */
6095 case 0x118:
6096 modrm = ldub_code(s->pc++);
6097 mod = (modrm >> 6) & 3;
6098 op = (modrm >> 3) & 7;
6099 switch(op) {
6100 case 0: /* prefetchnta */
6101 case 1: /* prefetchnt0 */
6102 case 2: /* prefetchnt0 */
6103 case 3: /* prefetchnt0 */
6104 if (mod == 3)
6105 goto illegal_op;
6106 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6107 /* nothing more to do */
6108 break;
6109 default: /* nop (multi byte) */
6110 gen_nop_modrm(s, modrm);
6111 break;
6112 }
6113 break;
/* 0F 19..1F: multi-byte nop; modrm is consumed but ignored */
6114 case 0x119 ... 0x11f: /* nop (multi byte) */
6115 modrm = ldub_code(s->pc++);
6116 gen_nop_modrm(s, modrm);
6117 break;
/* mov to/from control registers: CPL 0, register operand only
   (modrm mod bits must be 11).  Opcode bit 1 selects the write
   direction; writes end the TB since they can change translation
   state (paging, PE, ...). */
6118 case 0x120: /* mov reg, crN */
6119 case 0x122: /* mov crN, reg */
6120 if (s->cpl != 0) {
6121 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6122 } else {
6123 modrm = ldub_code(s->pc++);
6124 if ((modrm & 0xc0) != 0xc0)
6125 goto illegal_op;
6126 rm = (modrm & 7) | REX_B(s);
6127 reg = ((modrm >> 3) & 7) | rex_r;
6128 if (CODE64(s))
6129 ot = OT_QUAD;
6130 else
6131 ot = OT_LONG;
/* only CR0, CR2-CR4 and CR8 (TPR) exist */
6132 switch(reg) {
6133 case 0:
6134 case 2:
6135 case 3:
6136 case 4:
6137 case 8:
6138 if (b & 2) {
6139 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6140 gen_op_mov_TN_reg(ot, 0, rm);
6141 gen_op_movl_crN_T0(reg);
6142 gen_jmp_im(s->pc - s->cs_base);
6143 gen_eob(s);
6144 } else {
6145 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6146 #if !defined(CONFIG_USER_ONLY)
/* CR8 reads come from the APIC TPR, system emulation only */
6147 if (reg == 8)
6148 gen_op_movtl_T0_cr8();
6149 else
6150 #endif
6151 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6152 gen_op_mov_reg_T0(ot, rm);
6153 }
6154 break;
6155 default:
6156 goto illegal_op;
6157 }
6158 }
6159 break;
/* mov to/from debug registers: same shape as the CR case; DR4/DR5
   and DR8+ are rejected (CR4.DE aliasing not modelled — see XXX). */
6160 case 0x121: /* mov reg, drN */
6161 case 0x123: /* mov drN, reg */
6162 if (s->cpl != 0) {
6163 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6164 } else {
6165 modrm = ldub_code(s->pc++);
6166 if ((modrm & 0xc0) != 0xc0)
6167 goto illegal_op;
6168 rm = (modrm & 7) | REX_B(s);
6169 reg = ((modrm >> 3) & 7) | rex_r;
6170 if (CODE64(s))
6171 ot = OT_QUAD;
6172 else
6173 ot = OT_LONG;
6174 /* XXX: do it dynamically with CR4.DE bit */
6175 if (reg == 4 || reg == 5 || reg >= 8)
6176 goto illegal_op;
6177 if (b & 2) {
6178 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6179 gen_op_mov_TN_reg(ot, 0, rm);
6180 gen_op_movl_drN_T0(reg);
6181 gen_jmp_im(s->pc - s->cs_base);
6182 gen_eob(s);
6183 } else {
6184 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6185 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6186 gen_op_mov_reg_T0(ot, rm);
6187 }
6188 }
6189 break;
6190 case 0x106: /* clts */
6191 if (s->cpl != 0) {
6192 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6193 } else {
6194 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6195 gen_op_clts();
6196 /* abort block because static cpu state changed */
6197 gen_jmp_im(s->pc - s->cs_base);
6198 gen_eob(s);
6199 }
6200 break;
6201 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6202 case 0x1c3: /* MOVNTI reg, mem */
6203 if (!(s->cpuid_features & CPUID_SSE2))
6204 goto illegal_op;
6205 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6206 modrm = ldub_code(s->pc++);
6207 mod = (modrm >> 6) & 3;
6208 if (mod == 3)
6209 goto illegal_op;
6210 reg = ((modrm >> 3) & 7) | rex_r;
6211 /* generate a generic store */
6212 gen_ldst_modrm(s, modrm, ot, reg, 1);
6213 break;
6214 case 0x1ae:
6215 modrm = ldub_code(s->pc++);
6216 mod = (modrm >> 6) & 3;
6217 op = (modrm >> 3) & 7;
6218 switch(op) {
6219 case 0: /* fxsave */
6220 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6221 (s->flags & HF_EM_MASK))
6222 goto illegal_op;
6223 if (s->flags & HF_TS_MASK) {
6224 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6225 break;
6226 }
6227 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6228 gen_op_fxsave_A0((s->dflag == 2));
6229 break;
6230 case 1: /* fxrstor */
6231 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6232 (s->flags & HF_EM_MASK))
6233 goto illegal_op;
6234 if (s->flags & HF_TS_MASK) {
6235 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6236 break;
6237 }
6238 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6239 gen_op_fxrstor_A0((s->dflag == 2));
6240 break;
6241 case 2: /* ldmxcsr */
6242 case 3: /* stmxcsr */
6243 if (s->flags & HF_TS_MASK) {
6244 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6245 break;
6246 }
6247 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6248 mod == 3)
6249 goto illegal_op;
6250 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6251 if (op == 2) {
6252 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6253 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6254 } else {
6255 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6256 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6257 }
6258 break;
6259 case 5: /* lfence */
6260 case 6: /* mfence */
6261 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6262 goto illegal_op;
6263 break;
6264 case 7: /* sfence / clflush */
6265 if ((modrm & 0xc7) == 0xc0) {
6266 /* sfence */
6267 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6268 if (!(s->cpuid_features & CPUID_SSE))
6269 goto illegal_op;
6270 } else {
6271 /* clflush */
6272 if (!(s->cpuid_features & CPUID_CLFLUSH))
6273 goto illegal_op;
6274 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6275 }
6276 break;
6277 default:
6278 goto illegal_op;
6279 }
6280 break;
6281 case 0x10d: /* 3DNow! prefetch(w) */
6282 modrm = ldub_code(s->pc++);
6283 mod = (modrm >> 6) & 3;
6284 if (mod == 3)
6285 goto illegal_op;
6286 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6287 /* ignore for now */
6288 break;
6289 case 0x1aa: /* rsm */
6290 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6291 break;
6292 if (!(s->flags & HF_SMM_MASK))
6293 goto illegal_op;
6294 if (s->cc_op != CC_OP_DYNAMIC) {
6295 gen_op_set_cc_op(s->cc_op);
6296 s->cc_op = CC_OP_DYNAMIC;
6297 }
6298 gen_jmp_im(s->pc - s->cs_base);
6299 gen_op_rsm();
6300 gen_eob(s);
6301 break;
6302 case 0x10e ... 0x10f:
6303 /* 3DNow! instructions, ignore prefixes */
6304 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6305 case 0x110 ... 0x117:
6306 case 0x128 ... 0x12f:
6307 case 0x150 ... 0x177:
6308 case 0x17c ... 0x17f:
6309 case 0x1c2:
6310 case 0x1c4 ... 0x1c6:
6311 case 0x1d0 ... 0x1fe:
6312 gen_sse(s, b, pc_start, rex_r);
6313 break;
6314 default:
6315 goto illegal_op;
6316 }
6317 /* lock generation */
6318 if (s->prefix & PREFIX_LOCK)
6319 gen_op_unlock();
6320 return s->pc;
6321 illegal_op:
6322 if (s->prefix & PREFIX_LOCK)
6323 gen_op_unlock();
6324 /* XXX: ensure that no lock was generated */
6325 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6326 return s->pc;
6327 }
6328
6329 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6330 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6331
/* Flags read by an operation.  Indexed by micro-op number; each entry
   is the set of EFLAGS condition-code bits (CC_*) that the micro-op
   consumes.  optimize_flags() walks the op stream backwards and uses
   this table to compute flag liveness; ops absent from the table read
   no flags (implicit 0). */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps after a sub: each reads only the bits its
       condition tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc from the dynamic cc state */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc specialised for a preceding sub */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr exist in one variant per memory access mode
   (SUFFIX) and all consume the incoming carry */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    \
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6464
/* Flags written by an operation.  Indexed by micro-op number; each
   entry is the set of EFLAGS bits (CC_*) the micro-op defines.  In
   optimize_flags(), if none of an op's written bits are live, the op
   is replaced by its flag-free variant from opc_simpler[]. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* ops that come in one variant per memory access mode (SUFFIX) */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    \
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    \
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    \
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    \
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    \
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    \
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    \
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    \
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6615
/* Simpler form of an operation if no flags need to be generated.
   Maps a flag-producing micro-op to an equivalent op without the cc
   computation.  Entries left at 0 are filled with the identity
   mapping by optimize_flags_init(). */
static uint16_t opc_simpler[NB_OPS] = {
    /* these ops only exist to materialize flags; they become nops
       when the flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rol/ror variants exist per memory access mode (SUFFIX) */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
    \
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6659
6660 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6661 {
6662 switch(macro_id) {
6663 #ifdef MACRO_TEST
6664 case MACRO_TEST:
6665 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6666 break;
6667 #endif
6668 }
6669 }
6670
/* One-time translator initialisation: completes the opc_simpler
   table and registers the global TCG values used by the generated
   code. */
void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays: any op without an explicit
       simpler form maps to itself */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    /* env always lives in the fixed host register TCG_AREG0 */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* target words don't fit in a host register: back T0/T1/A0 with
       CPUState memory slots instead */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    /* T0/T1/A0 each pinned to a dedicated host register */
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
    cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);
}
6700
6701 /* CPU flags computation optimization: we move backward thru the
6702 generated code to see which flags are needed. The operation is
6703 modified if suitable */
6704 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6705 {
6706 uint16_t *opc_ptr;
6707 int live_flags, write_flags, op;
6708
6709 opc_ptr = opc_buf + opc_buf_len;
6710 /* live_flags contains the flags needed by the next instructions
6711 in the code. At the end of the block, we consider that all the
6712 flags are live. */
6713 live_flags = CC_OSZAPC;
6714 while (opc_ptr > opc_buf) {
6715 op = *--opc_ptr;
6716 /* if none of the flags written by the instruction is used,
6717 then we can try to find a simpler instruction */
6718 write_flags = opc_write_flags[op];
6719 if ((live_flags & write_flags) == 0) {
6720 *opc_ptr = opc_simpler[op];
6721 }
6722 /* compute the live flags before the instruction */
6723 live_flags &= ~write_flags;
6724 live_flags |= opc_read_flags[op];
6725 }
6726 }
6727
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to recover the
   guest EIP/cc_op after a fault).  Returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the static CPU state bits cached in tb->flags into the
       disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: mem_index advances in strides
       of 4 per MMU mode (0 = user-mode emulation raw access) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only safe when no single-step /
       pending-irq condition can require stopping between blocks */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
#endif

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op state at each op-buffer
               position so a faulting op can be mapped back */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6886
6887 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6888 {
6889 return gen_intermediate_code_internal(env, tb, 0);
6890 }
6891
6892 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6893 {
6894 return gen_intermediate_code_internal(env, tb, 1);
6895 }
6896
/* Restore the guest state (EIP and, when statically known, cc_op)
   corresponding to op-buffer position 'pc_pos' inside 'tb', using the
   gen_opc_pc/gen_opc_cc_op tables filled by the search_pc translation
   pass. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0;i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* gen_opc_pc holds CS-relative-adjusted addresses; subtract the
       base to recover EIP */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    /* only overwrite cc_op when it was statically known at this op;
       CC_OP_DYNAMIC means the runtime value is already correct */
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}