/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>
#include <assert.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,\
    prefix ## R8 ## suffix,\
    prefix ## R9 ## suffix,\
    prefix ## R10 ## suffix,\
    prefix ## R11 ## suffix,\
    prefix ## R12 ## suffix,\
    prefix ## R13 ## suffix,\
    prefix ## R14 ## suffix,\
    prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

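/* Byte offsets of the 8/16/32 bit sub-registers inside a target_ulong
   sized register slot, for both host endiannesses: they are added to
   offsetof(CPUState, regs[reg]) below so that partial width loads and
   stores hit the correct byte lanes. */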
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

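/* For OT_BYTE, registers 4..7 normally name the legacy high byte
   registers AH/CH/DH/BH, stored at regs[reg - 4] + REG_H_OFFSET; with a
   REX prefix (x86_64_hregs set) or reg >= 8 they name the low byte of
   SPL/BPL/SIL/DIL and R8B-R15B instead. */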
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};

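/* Memory access helpers: the 'idx' argument packs the operand size
   (OT_BYTE..OT_QUAD) in its low 2 bits and the softmmu memory index,
   biased by one, in the upper bits -- callers pass 'ot + s->mem_index'. */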
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

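/* Check that I/O to the port in T[0] is allowed: in protected mode with
   CPL > IOPL, or in vm86 mode, the TSS I/O permission bitmap is consulted
   (the helper can raise #GP), and when SVM intercepts protected mode I/O,
   helper_svm_check_io may trigger a #VMEXIT. */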
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

/* same method as Valgrind: we generate jumps to the current or next
   instruction */
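/* Each generated iteration is self contained: test ECX == 0 and jump to
   the next instruction if so, execute one string op, decrement ECX, then
   jump back to cur_eip so that the loop re-enters through the normal
   translation path and single step/interrupt checks stay precise. */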
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}

#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_op_string_jnz_sub[nz][ot](l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

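/* Lazy flag evaluation: cc_op indexes the global cc_table, whose entries
   hold compute_all/compute_c function pointers. The helpers below build
   the entry address by hand (the shift by 3 or 4 matches the CCTable
   entry size on 32 and 64 bit hosts) and emit an indirect TCG call that
   returns the flags in cpu_tmp2_i32. */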
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
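        /* Select the flag mode dynamically: cc_op becomes CC_OP_ADDB + ot
           when no carry came in or, since the ADC group sits 4 entries
           after the ADD group in the cc_op enum, CC_OP_ADCB + ot when it
           did. OP_SBBL below plays the same trick with the SUB/SBB
           groups. */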
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
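/* INC and DEC update all arithmetic flags except CF, so the carry of the
   previous cc_op is materialized into cc_src before cc_op switches to
   CC_OP_INCB/CC_OP_DECB, whose evaluators take CF from cc_src. */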
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

/* XXX: add faster immediate case */
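/* SHL/SHR/SAR with a variable count: the value shifted by (count - 1) is
   kept in T3 so that the flag evaluators can recover CF as the last bit
   shifted out, and the conditional branch at the end skips the cc update
   entirely when the count is 0, since x86 then leaves the flags
   untouched. */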
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
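/* ROL/ROR with a variable count, built from two shifts:
   rol(x, n) = (x << n) | (x >> (width - n)), and symmetrically for ror.
   Afterwards CF is taken from the bit that wrapped around and OF from the
   XOR of the old and new top bits, merged into the EFLAGS image obtained
   from gen_compute_eflags. */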
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

/* XXX: add faster immediate case */
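/* SHLD/SHRD: double width shift of T[0] with bits supplied from T[1] and
   the count in T3. For 16 bit operands the two halves are first
   concatenated into a 32 bit value so that counts up to 31 behave the
   way Intel documents (see the note below). */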
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but testing for that would
               complicate the code */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}

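/* Decode a ModRM effective address into A0. Handles both 16 bit and
   32/64 bit forms: optional SIB byte, disp8/disp16/disp32, RIP-relative
   addressing in 64 bit code (mod == 0, base == 5, no SIB), and the
   default segment (SS when the base is EBP/ESP, DS otherwise) when a
   segment base must be added. */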
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {

        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}

static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {

        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}

/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    } else {
        override = R_DS;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}

/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}

static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}

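/* End the TB with direct block chaining when the target lies in the same
   page as the TB start (or the page of the current instruction), so the
   goto_tb slot can later be patched to branch straight into the next TB;
   cross page jumps just store EIP and exit. */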
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

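/* Conditional jump. When the flags come from a SUB/CMP, the condition can
   be tested directly on the saved operands via gen_jcc_sub; for the other
   arithmetic cc_op groups only JCC_Z and JCC_S can be handled that way
   (ZF and SF depend only on cc_dst), everything else falls back to
   gen_setcc_slow, which materializes the flag into T0 first. */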
2037 static inline void gen_jcc(DisasContext *s, int b,
2038 target_ulong val, target_ulong next_eip)
2039 {
2040 TranslationBlock *tb;
2041 int inv, jcc_op;
2042 GenOpFunc1 *func;
2043 target_ulong tmp;
2044 int l1, l2;
2045
2046 inv = b & 1;
2047 jcc_op = (b >> 1) & 7;
2048
2049 if (s->jmp_opt) {
2050 switch(s->cc_op) {
2051 /* we optimize the cmp/jcc case */
2052 case CC_OP_SUBB:
2053 case CC_OP_SUBW:
2054 case CC_OP_SUBL:
2055 case CC_OP_SUBQ:
2056 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
2057 break;
2058
2059 /* some jumps are easy to compute */
2060 case CC_OP_ADDB:
2061 case CC_OP_ADDW:
2062 case CC_OP_ADDL:
2063 case CC_OP_ADDQ:
2064
2065 case CC_OP_ADCB:
2066 case CC_OP_ADCW:
2067 case CC_OP_ADCL:
2068 case CC_OP_ADCQ:
2069
2070 case CC_OP_SBBB:
2071 case CC_OP_SBBW:
2072 case CC_OP_SBBL:
2073 case CC_OP_SBBQ:
2074
2075 case CC_OP_LOGICB:
2076 case CC_OP_LOGICW:
2077 case CC_OP_LOGICL:
2078 case CC_OP_LOGICQ:
2079
2080 case CC_OP_INCB:
2081 case CC_OP_INCW:
2082 case CC_OP_INCL:
2083 case CC_OP_INCQ:
2084
2085 case CC_OP_DECB:
2086 case CC_OP_DECW:
2087 case CC_OP_DECL:
2088 case CC_OP_DECQ:
2089
2090 case CC_OP_SHLB:
2091 case CC_OP_SHLW:
2092 case CC_OP_SHLL:
2093 case CC_OP_SHLQ:
2094
2095 case CC_OP_SARB:
2096 case CC_OP_SARW:
2097 case CC_OP_SARL:
2098 case CC_OP_SARQ:
2099 switch(jcc_op) {
2100 case JCC_Z:
2101 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2102 break;
2103 case JCC_S:
2104 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2105 break;
2106 default:
2107 func = NULL;
2108 break;
2109 }
2110 break;
2111 default:
2112 func = NULL;
2113 break;
2114 }
2115
2116 if (s->cc_op != CC_OP_DYNAMIC) {
2117 gen_op_set_cc_op(s->cc_op);
2118 s->cc_op = CC_OP_DYNAMIC;
2119 }
2120
2121 if (!func) {
2122 gen_setcc_slow[jcc_op]();
2123 func = gen_op_jnz_T0_label;
2124 }
2125
2126 if (inv) {
2127 tmp = val;
2128 val = next_eip;
2129 next_eip = tmp;
2130 }
2131 tb = s->tb;
2132
2133 l1 = gen_new_label();
2134 func(l1);
2135
2136 gen_goto_tb(s, 0, next_eip);
2137
2138 gen_set_label(l1);
2139 gen_goto_tb(s, 1, val);
2140
2141 s->is_jmp = 3;
2142 } else {
2143
2144 if (s->cc_op != CC_OP_DYNAMIC) {
2145 gen_op_set_cc_op(s->cc_op);
2146 s->cc_op = CC_OP_DYNAMIC;
2147 }
2148 gen_setcc_slow[jcc_op]();
2149 if (inv) {
2150 tmp = val;
2151 val = next_eip;
2152 next_eip = tmp;
2153 }
2154 l1 = gen_new_label();
2155 l2 = gen_new_label();
2156 gen_op_jnz_T0_label(l1);
2157 gen_jmp_im(next_eip);
2158 gen_op_jmp_label(l2);
2159 gen_set_label(l1);
2160 gen_jmp_im(val);
2161 gen_set_label(l2);
2162 gen_eob(s);
2163 }
2164 }
2165
2166 static void gen_setcc(DisasContext *s, int b)
2167 {
2168 int inv, jcc_op;
2169 GenOpFunc *func;
2170
2171 inv = b & 1;
2172 jcc_op = (b >> 1) & 7;
2173 switch(s->cc_op) {
2174 /* we optimize the cmp/jcc case */
2175 case CC_OP_SUBB:
2176 case CC_OP_SUBW:
2177 case CC_OP_SUBL:
2178 case CC_OP_SUBQ:
2179 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
2180 if (!func)
2181 goto slow_jcc;
2182 break;
2183
2184 /* some jumps are easy to compute */
2185 case CC_OP_ADDB:
2186 case CC_OP_ADDW:
2187 case CC_OP_ADDL:
2188 case CC_OP_ADDQ:
2189
2190 case CC_OP_LOGICB:
2191 case CC_OP_LOGICW:
2192 case CC_OP_LOGICL:
2193 case CC_OP_LOGICQ:
2194
2195 case CC_OP_INCB:
2196 case CC_OP_INCW:
2197 case CC_OP_INCL:
2198 case CC_OP_INCQ:
2199
2200 case CC_OP_DECB:
2201 case CC_OP_DECW:
2202 case CC_OP_DECL:
2203 case CC_OP_DECQ:
2204
2205 case CC_OP_SHLB:
2206 case CC_OP_SHLW:
2207 case CC_OP_SHLL:
2208 case CC_OP_SHLQ:
2209 switch(jcc_op) {
2210 case JCC_Z:
2211 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2212 break;
2213 case JCC_S:
2214 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
2215 break;
2216 default:
2217 goto slow_jcc;
2218 }
2219 break;
2220 default:
2221 slow_jcc:
2222 if (s->cc_op != CC_OP_DYNAMIC)
2223 gen_op_set_cc_op(s->cc_op);
2224 func = gen_setcc_slow[jcc_op];
2225 break;
2226 }
2227 func();
2228 if (inv) {
2229 gen_op_xor_T0_1();
2230 }
2231 }
2232
2233 /* move T0 to seg_reg and compute if the CPU state may change. Never
2234 call this function with seg_reg == R_CS */
2235 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2236 {
2237 if (s->pe && !s->vm86) {
2238 /* XXX: optimize by finding processor state dynamically */
2239 if (s->cc_op != CC_OP_DYNAMIC)
2240 gen_op_set_cc_op(s->cc_op);
2241 gen_jmp_im(cur_eip);
2242 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2243 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2244 /* abort translation because the addseg value may change or
2245 because ss32 may change. For R_SS, translation must always
2246 stop as a special handling must be done to disable hardware
2247 interrupts for the next instruction */
2248 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2249 s->is_jmp = 3;
2250 } else {
2251 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
2252 if (seg_reg == R_SS)
2253 s->is_jmp = 3;
2254 }
2255 }
2256
2257 static inline int svm_is_rep(int prefixes)
2258 {
2259 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2260 }
2261
2262 static inline int
2263 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2264 uint32_t type, uint64_t param)
2265 {
2266     if (!(s->flags & INTERCEPT_SVM_MASK))
2267         /* no SVM activated */
2268         return 0;
2269 switch(type) {
2270 /* CRx and DRx reads/writes */
2271 case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2272 if (s->cc_op != CC_OP_DYNAMIC) {
2273 gen_op_set_cc_op(s->cc_op);
2274 }
2275 gen_jmp_im(pc_start - s->cs_base);
2276 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2277 tcg_const_i32(type), tcg_const_i64(param));
2278         /* special case: we do not know at translation time whether the
2279            interception occurs, so we assume it does not */
2280 return 0;
2281 case SVM_EXIT_MSR:
2282         if (s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2283 if (s->cc_op != CC_OP_DYNAMIC) {
2284 gen_op_set_cc_op(s->cc_op);
2285 }
2286 gen_jmp_im(pc_start - s->cs_base);
2287 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2288 tcg_const_i32(type), tcg_const_i64(param));
2289             /* special case: we do not know at translation time whether the
2290                interception occurs, so we assume it does not */
2291 return 0;
2292 }
2293 break;
2294 default:
2295         if (s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2296 if (s->cc_op != CC_OP_DYNAMIC) {
2297 gen_op_set_cc_op(s->cc_op);
2298 }
2299 gen_jmp_im(pc_start - s->cs_base);
2300 tcg_gen_helper_0_2(helper_vmexit,
2301 tcg_const_i32(type), tcg_const_i64(param));
2302             /* end the TB here so that it never grows past the
2303                vmexit */
2304 gen_eob(s);
2305 return 1;
2306 }
2307 }
2308 return 0;
2309 }
2310
2311 static inline int
2312 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2313 {
2314 return gen_svm_check_intercept_param(s, pc_start, type, 0);
2315 }
2316
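     /* add a constant to ESP/RSP, with the width chosen from the code size
        (64 bit) or the ss32 flag (32 vs 16 bit) */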
2317 static inline void gen_stack_update(DisasContext *s, int addend)
2318 {
2319 #ifdef TARGET_X86_64
2320 if (CODE64(s)) {
2321 gen_op_addq_ESP_im(addend);
2322 } else
2323 #endif
2324 if (s->ss32) {
2325 gen_op_addl_ESP_im(addend);
2326 } else {
2327 gen_op_addw_ESP_im(addend);
2328 }
2329 }
2330
2331 /* generate a push. It depends on ss32, addseg and dflag */
2332 static void gen_push_T0(DisasContext *s)
2333 {
2334 #ifdef TARGET_X86_64
2335 if (CODE64(s)) {
2336 gen_op_movq_A0_reg(R_ESP);
2337 if (s->dflag) {
2338 gen_op_addq_A0_im(-8);
2339 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2340 } else {
2341 gen_op_addq_A0_im(-2);
2342 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2343 }
2344 gen_op_mov_reg_A0(2, R_ESP);
2345 } else
2346 #endif
2347 {
2348 gen_op_movl_A0_reg(R_ESP);
2349 if (!s->dflag)
2350 gen_op_addl_A0_im(-2);
2351 else
2352 gen_op_addl_A0_im(-4);
2353 if (s->ss32) {
2354 if (s->addseg) {
2355 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2356 gen_op_addl_A0_seg(R_SS);
2357 }
2358 } else {
2359 gen_op_andl_A0_ffff();
2360 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2361 gen_op_addl_A0_seg(R_SS);
2362 }
2363 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2364 if (s->ss32 && !s->addseg)
2365 gen_op_mov_reg_A0(1, R_ESP);
2366 else
2367 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2368 }
2369 }
2370
2371 /* same as gen_push_T0, but for T1; slower, only used for call Ev */
2373 static void gen_push_T1(DisasContext *s)
2374 {
2375 #ifdef TARGET_X86_64
2376 if (CODE64(s)) {
2377 gen_op_movq_A0_reg(R_ESP);
2378 if (s->dflag) {
2379 gen_op_addq_A0_im(-8);
2380 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2381 } else {
2382 gen_op_addq_A0_im(-2);
2383                 gen_op_st_T1_A0(OT_WORD + s->mem_index); /* push T1, not T0 */
2384 }
2385 gen_op_mov_reg_A0(2, R_ESP);
2386 } else
2387 #endif
2388 {
2389 gen_op_movl_A0_reg(R_ESP);
2390 if (!s->dflag)
2391 gen_op_addl_A0_im(-2);
2392 else
2393 gen_op_addl_A0_im(-4);
2394 if (s->ss32) {
2395 if (s->addseg) {
2396 gen_op_addl_A0_seg(R_SS);
2397 }
2398 } else {
2399 gen_op_andl_A0_ffff();
2400 gen_op_addl_A0_seg(R_SS);
2401 }
2402 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2403
2404 if (s->ss32 && !s->addseg)
2405 gen_op_mov_reg_A0(1, R_ESP);
2406 else
2407 gen_stack_update(s, (-2) << s->dflag);
2408 }
2409 }
2410
2411 /* two step pop is necessary for precise exceptions */
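     /* (the value is read into T0 first; ESP is only adjusted by
        gen_pop_update() once the load can no longer fault) */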
2412 static void gen_pop_T0(DisasContext *s)
2413 {
2414 #ifdef TARGET_X86_64
2415 if (CODE64(s)) {
2416 gen_op_movq_A0_reg(R_ESP);
2417 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2418 } else
2419 #endif
2420 {
2421 gen_op_movl_A0_reg(R_ESP);
2422 if (s->ss32) {
2423 if (s->addseg)
2424 gen_op_addl_A0_seg(R_SS);
2425 } else {
2426 gen_op_andl_A0_ffff();
2427 gen_op_addl_A0_seg(R_SS);
2428 }
2429 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2430 }
2431 }
2432
2433 static void gen_pop_update(DisasContext *s)
2434 {
2435 #ifdef TARGET_X86_64
2436 if (CODE64(s) && s->dflag) {
2437 gen_stack_update(s, 8);
2438 } else
2439 #endif
2440 {
2441 gen_stack_update(s, 2 << s->dflag);
2442 }
2443 }
2444
2445 static void gen_stack_A0(DisasContext *s)
2446 {
2447 gen_op_movl_A0_reg(R_ESP);
2448 if (!s->ss32)
2449 gen_op_andl_A0_ffff();
2450 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2451 if (s->addseg)
2452 gen_op_addl_A0_seg(R_SS);
2453 }
2454
2455 /* NOTE: wrap-around in 16 bit mode is not fully handled */
2456 static void gen_pusha(DisasContext *s)
2457 {
2458 int i;
2459 gen_op_movl_A0_reg(R_ESP);
2460 gen_op_addl_A0_im(-16 << s->dflag);
2461 if (!s->ss32)
2462 gen_op_andl_A0_ffff();
2463 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2464 if (s->addseg)
2465 gen_op_addl_A0_seg(R_SS);
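         /* the registers are stored from the lowest address up, so EDI
            (7 - 0) ends up lowest, matching the PUSHA stack image */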
2466 for(i = 0;i < 8; i++) {
2467 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2468 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2469 gen_op_addl_A0_im(2 << s->dflag);
2470 }
2471 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2472 }
2473
2474 /* NOTE: wrap-around in 16 bit mode is not fully handled */
2475 static void gen_popa(DisasContext *s)
2476 {
2477 int i;
2478 gen_op_movl_A0_reg(R_ESP);
2479 if (!s->ss32)
2480 gen_op_andl_A0_ffff();
2481 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2482 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2483 if (s->addseg)
2484 gen_op_addl_A0_seg(R_SS);
2485 for(i = 0;i < 8; i++) {
2486 /* ESP is not reloaded */
2487 if (i != 3) {
2488 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2489 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2490 }
2491 gen_op_addl_A0_im(2 << s->dflag);
2492 }
2493 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2494 }
2495
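     /* ENTER: push EBP, have the helper copy the nested frame pointers for
        'level' (taken modulo 32, as on hardware), then reserve esp_addend
        bytes of locals */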
2496 static void gen_enter(DisasContext *s, int esp_addend, int level)
2497 {
2498 int ot, opsize;
2499
2500 level &= 0x1f;
2501 #ifdef TARGET_X86_64
2502 if (CODE64(s)) {
2503 ot = s->dflag ? OT_QUAD : OT_WORD;
2504 opsize = 1 << ot;
2505
2506 gen_op_movl_A0_reg(R_ESP);
2507 gen_op_addq_A0_im(-opsize);
2508 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2509
2510 /* push bp */
2511 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2512 gen_op_st_T0_A0(ot + s->mem_index);
2513 if (level) {
2514 /* XXX: must save state */
2515 tcg_gen_helper_0_3(helper_enter64_level,
2516 tcg_const_i32(level),
2517 tcg_const_i32((ot == OT_QUAD)),
2518 cpu_T[1]);
2519 }
2520 gen_op_mov_reg_T1(ot, R_EBP);
2521 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2522 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2523 } else
2524 #endif
2525 {
2526 ot = s->dflag + OT_WORD;
2527 opsize = 2 << s->dflag;
2528
2529 gen_op_movl_A0_reg(R_ESP);
2530 gen_op_addl_A0_im(-opsize);
2531 if (!s->ss32)
2532 gen_op_andl_A0_ffff();
2533 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2534 if (s->addseg)
2535 gen_op_addl_A0_seg(R_SS);
2536 /* push bp */
2537 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2538 gen_op_st_T0_A0(ot + s->mem_index);
2539 if (level) {
2540 /* XXX: must save state */
2541 tcg_gen_helper_0_3(helper_enter_level,
2542 tcg_const_i32(level),
2543 tcg_const_i32(s->dflag),
2544 cpu_T[1]);
2545 }
2546 gen_op_mov_reg_T1(ot, R_EBP);
2547 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2548 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2549 }
2550 }
2551
2552 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2553 {
2554 if (s->cc_op != CC_OP_DYNAMIC)
2555 gen_op_set_cc_op(s->cc_op);
2556 gen_jmp_im(cur_eip);
2557 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
2558 s->is_jmp = 3;
2559 }
2560
2561 /* an interrupt is different from an exception because of the
2562 privilege checks */
2563 static void gen_interrupt(DisasContext *s, int intno,
2564 target_ulong cur_eip, target_ulong next_eip)
2565 {
2566 if (s->cc_op != CC_OP_DYNAMIC)
2567 gen_op_set_cc_op(s->cc_op);
2568 gen_jmp_im(cur_eip);
2569 tcg_gen_helper_0_2(helper_raise_interrupt,
2570 tcg_const_i32(intno),
2571 tcg_const_i32(next_eip - cur_eip));
2572 s->is_jmp = 3;
2573 }
2574
2575 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2576 {
2577 if (s->cc_op != CC_OP_DYNAMIC)
2578 gen_op_set_cc_op(s->cc_op);
2579 gen_jmp_im(cur_eip);
2580 tcg_gen_helper_0_0(helper_debug);
2581 s->is_jmp = 3;
2582 }
2583
2584 /* generate a generic end of block. Trace exception is also generated
2585 if needed */
2586 static void gen_eob(DisasContext *s)
2587 {
2588 if (s->cc_op != CC_OP_DYNAMIC)
2589 gen_op_set_cc_op(s->cc_op);
2590 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2591 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2592 }
2593 if (s->singlestep_enabled) {
2594 tcg_gen_helper_0_0(helper_debug);
2595 } else if (s->tf) {
2596 tcg_gen_helper_0_0(helper_single_step);
2597 } else {
2598 tcg_gen_exit_tb(0);
2599 }
2600 s->is_jmp = 3;
2601 }
2602
2603 /* generate a jump to eip. No segment change must happen before as a
2604 direct call to the next block may occur */
2605 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2606 {
2607 if (s->jmp_opt) {
2608 if (s->cc_op != CC_OP_DYNAMIC) {
2609 gen_op_set_cc_op(s->cc_op);
2610 s->cc_op = CC_OP_DYNAMIC;
2611 }
2612 gen_goto_tb(s, tb_num, eip);
2613 s->is_jmp = 3;
2614 } else {
2615 gen_jmp_im(eip);
2616 gen_eob(s);
2617 }
2618 }
2619
2620 static void gen_jmp(DisasContext *s, target_ulong eip)
2621 {
2622 gen_jmp_tb(s, eip, 0);
2623 }
2624
2625 static inline void gen_ldq_env_A0(int idx, int offset)
2626 {
2627 int mem_index = (idx >> 2) - 1;
2628 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2629 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2630 }
2631
2632 static inline void gen_stq_env_A0(int idx, int offset)
2633 {
2634 int mem_index = (idx >> 2) - 1;
2635 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2636 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2637 }
2638
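     /* 128 bit ("o" suffix) loads/stores are split into two 64 bit halves
        that go through cpu_tmp1_i64 */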
2639 static inline void gen_ldo_env_A0(int idx, int offset)
2640 {
2641 int mem_index = (idx >> 2) - 1;
2642 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2643 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2644 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2645 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2646 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2647 }
2648
2649 static inline void gen_sto_env_A0(int idx, int offset)
2650 {
2651 int mem_index = (idx >> 2) - 1;
2652 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2653 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2654 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2655 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2656 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2657 }
2658
2659 static inline void gen_op_movo(int d_offset, int s_offset)
2660 {
2661 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2662 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2663 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2664 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2665 }
2666
2667 static inline void gen_op_movq(int d_offset, int s_offset)
2668 {
2669 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2670 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2671 }
2672
2673 static inline void gen_op_movl(int d_offset, int s_offset)
2674 {
2675 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2676 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2677 }
2678
2679 static inline void gen_op_movq_env_0(int d_offset)
2680 {
2681 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2682 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2683 }
2684
2685 #define SSE_SPECIAL ((void *)1)
2686 #define SSE_DUMMY ((void *)2)
2687
2688 #define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2689 #define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2690 helper_ ## x ## ss, helper_ ## x ## sd, }
2691
2692 static void *sse_op_table1[256][4] = {
2693 /* 3DNow! extensions */
2694 [0x0e] = { SSE_DUMMY }, /* femms */
2695 [0x0f] = { SSE_DUMMY }, /* pf... */
2696 /* pure SSE operations */
2697 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2698 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2699 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2700 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2701 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2702 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2703 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2704 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2705
2706 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2707 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2708 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2709 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2710     [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
2711     [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
2712 [0x2e] = { helper_ucomiss, helper_ucomisd },
2713 [0x2f] = { helper_comiss, helper_comisd },
2714 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2715 [0x51] = SSE_FOP(sqrt),
2716 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2717 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2718 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2719 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2720 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2721 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2722 [0x58] = SSE_FOP(add),
2723 [0x59] = SSE_FOP(mul),
2724 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2725 helper_cvtss2sd, helper_cvtsd2ss },
2726 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2727 [0x5c] = SSE_FOP(sub),
2728 [0x5d] = SSE_FOP(min),
2729 [0x5e] = SSE_FOP(div),
2730 [0x5f] = SSE_FOP(max),
2731
2732 [0xc2] = SSE_FOP(cmpeq),
2733 [0xc6] = { helper_shufps, helper_shufpd },
2734
2735 /* MMX ops and their SSE extensions */
2736 [0x60] = MMX_OP2(punpcklbw),
2737 [0x61] = MMX_OP2(punpcklwd),
2738 [0x62] = MMX_OP2(punpckldq),
2739 [0x63] = MMX_OP2(packsswb),
2740 [0x64] = MMX_OP2(pcmpgtb),
2741 [0x65] = MMX_OP2(pcmpgtw),
2742 [0x66] = MMX_OP2(pcmpgtl),
2743 [0x67] = MMX_OP2(packuswb),
2744 [0x68] = MMX_OP2(punpckhbw),
2745 [0x69] = MMX_OP2(punpckhwd),
2746 [0x6a] = MMX_OP2(punpckhdq),
2747 [0x6b] = MMX_OP2(packssdw),
2748 [0x6c] = { NULL, helper_punpcklqdq_xmm },
2749 [0x6d] = { NULL, helper_punpckhqdq_xmm },
2750 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2751     [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2752 [0x70] = { helper_pshufw_mmx,
2753 helper_pshufd_xmm,
2754 helper_pshufhw_xmm,
2755 helper_pshuflw_xmm },
2756 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2757 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2758 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2759 [0x74] = MMX_OP2(pcmpeqb),
2760 [0x75] = MMX_OP2(pcmpeqw),
2761 [0x76] = MMX_OP2(pcmpeql),
2762 [0x77] = { SSE_DUMMY }, /* emms */
2763 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2764 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2765     [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2766 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2767 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2768 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2769 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2770 [0xd1] = MMX_OP2(psrlw),
2771 [0xd2] = MMX_OP2(psrld),
2772 [0xd3] = MMX_OP2(psrlq),
2773 [0xd4] = MMX_OP2(paddq),
2774 [0xd5] = MMX_OP2(pmullw),
2775     [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
2776 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2777 [0xd8] = MMX_OP2(psubusb),
2778 [0xd9] = MMX_OP2(psubusw),
2779 [0xda] = MMX_OP2(pminub),
2780 [0xdb] = MMX_OP2(pand),
2781 [0xdc] = MMX_OP2(paddusb),
2782 [0xdd] = MMX_OP2(paddusw),
2783 [0xde] = MMX_OP2(pmaxub),
2784 [0xdf] = MMX_OP2(pandn),
2785 [0xe0] = MMX_OP2(pavgb),
2786 [0xe1] = MMX_OP2(psraw),
2787 [0xe2] = MMX_OP2(psrad),
2788 [0xe3] = MMX_OP2(pavgw),
2789 [0xe4] = MMX_OP2(pmulhuw),
2790 [0xe5] = MMX_OP2(pmulhw),
2791 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2792     [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2793 [0xe8] = MMX_OP2(psubsb),
2794 [0xe9] = MMX_OP2(psubsw),
2795 [0xea] = MMX_OP2(pminsw),
2796 [0xeb] = MMX_OP2(por),
2797 [0xec] = MMX_OP2(paddsb),
2798 [0xed] = MMX_OP2(paddsw),
2799 [0xee] = MMX_OP2(pmaxsw),
2800 [0xef] = MMX_OP2(pxor),
2801 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2802 [0xf1] = MMX_OP2(psllw),
2803 [0xf2] = MMX_OP2(pslld),
2804 [0xf3] = MMX_OP2(psllq),
2805 [0xf4] = MMX_OP2(pmuludq),
2806 [0xf5] = MMX_OP2(pmaddwd),
2807 [0xf6] = MMX_OP2(psadbw),
2808 [0xf7] = MMX_OP2(maskmov),
2809 [0xf8] = MMX_OP2(psubb),
2810 [0xf9] = MMX_OP2(psubw),
2811 [0xfa] = MMX_OP2(psubl),
2812 [0xfb] = MMX_OP2(psubq),
2813 [0xfc] = MMX_OP2(paddb),
2814 [0xfd] = MMX_OP2(paddw),
2815 [0xfe] = MMX_OP2(paddl),
2816 };
2817
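     /* immediate-count shifts (opcodes 0x71-0x73): first index is
        width group (w/d/q) * 8 plus the modrm reg field, second index
        is MMX vs SSE */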
2818 static void *sse_op_table2[3 * 8][2] = {
2819 [0 + 2] = MMX_OP2(psrlw),
2820 [0 + 4] = MMX_OP2(psraw),
2821 [0 + 6] = MMX_OP2(psllw),
2822 [8 + 2] = MMX_OP2(psrld),
2823 [8 + 4] = MMX_OP2(psrad),
2824 [8 + 6] = MMX_OP2(pslld),
2825 [16 + 2] = MMX_OP2(psrlq),
2826 [16 + 3] = { NULL, helper_psrldq_xmm },
2827 [16 + 6] = MMX_OP2(psllq),
2828 [16 + 7] = { NULL, helper_pslldq_xmm },
2829 };
2830
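     /* scalar int<->float conversions, in rows of four: {si2ss, si2sd,
        sq2ss, sq2sd}, then the truncating (cvtt*2si) and the rounding
        (cvt*2si) forms */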
2831 static void *sse_op_table3[4 * 3] = {
2832 helper_cvtsi2ss,
2833 helper_cvtsi2sd,
2834 X86_64_ONLY(helper_cvtsq2ss),
2835 X86_64_ONLY(helper_cvtsq2sd),
2836
2837 helper_cvttss2si,
2838 helper_cvttsd2si,
2839 X86_64_ONLY(helper_cvttss2sq),
2840 X86_64_ONLY(helper_cvttsd2sq),
2841
2842 helper_cvtss2si,
2843 helper_cvtsd2si,
2844 X86_64_ONLY(helper_cvtss2sq),
2845 X86_64_ONLY(helper_cvtsd2sq),
2846 };
2847
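     /* cmpps/cmppd/cmpss/cmpsd, indexed by the comparison immediate (0..7) */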
2848 static void *sse_op_table4[8][4] = {
2849 SSE_FOP(cmpeq),
2850 SSE_FOP(cmplt),
2851 SSE_FOP(cmple),
2852 SSE_FOP(cmpunord),
2853 SSE_FOP(cmpneq),
2854 SSE_FOP(cmpnlt),
2855 SSE_FOP(cmpnle),
2856 SSE_FOP(cmpord),
2857 };
2858
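     /* 3DNow! operations, indexed by the opcode suffix byte that follows
        the modrm operands */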
2859 static void *sse_op_table5[256] = {
2860 [0x0c] = helper_pi2fw,
2861 [0x0d] = helper_pi2fd,
2862 [0x1c] = helper_pf2iw,
2863 [0x1d] = helper_pf2id,
2864 [0x8a] = helper_pfnacc,
2865 [0x8e] = helper_pfpnacc,
2866 [0x90] = helper_pfcmpge,
2867 [0x94] = helper_pfmin,
2868 [0x96] = helper_pfrcp,
2869 [0x97] = helper_pfrsqrt,
2870 [0x9a] = helper_pfsub,
2871 [0x9e] = helper_pfadd,
2872 [0xa0] = helper_pfcmpgt,
2873 [0xa4] = helper_pfmax,
2874 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
2875 [0xa7] = helper_movq, /* pfrsqit1 */
2876 [0xaa] = helper_pfsubr,
2877 [0xae] = helper_pfacc,
2878 [0xb0] = helper_pfcmpeq,
2879 [0xb4] = helper_pfmul,
2880 [0xb6] = helper_movq, /* pfrcpit2 */
2881 [0xb7] = helper_pmulhrw_mmx,
2882 [0xbb] = helper_pswapd,
2883 [0xbf] = helper_pavgb_mmx /* pavgusb */
2884 };
2885
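     /* translate one 0x0f SSE/MMX/3DNow! opcode; b is the opcode byte and
        b1, derived from the mandatory prefix, selects the table column */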
2886 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2887 {
2888 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2889 int modrm, mod, rm, reg, reg_addr, offset_addr;
2890 void *sse_op2;
2891
2892 b &= 0xff;
2893 if (s->prefix & PREFIX_DATA)
2894 b1 = 1;
2895 else if (s->prefix & PREFIX_REPZ)
2896 b1 = 2;
2897 else if (s->prefix & PREFIX_REPNZ)
2898 b1 = 3;
2899 else
2900 b1 = 0;
2901 sse_op2 = sse_op_table1[b][b1];
2902 if (!sse_op2)
2903 goto illegal_op;
2904 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2905 is_xmm = 1;
2906 } else {
2907 if (b1 == 0) {
2908 /* MMX case */
2909 is_xmm = 0;
2910 } else {
2911 is_xmm = 1;
2912 }
2913 }
2914 /* simple MMX/SSE operation */
2915 if (s->flags & HF_TS_MASK) {
2916 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2917 return;
2918 }
2919 if (s->flags & HF_EM_MASK) {
2920 illegal_op:
2921 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2922 return;
2923 }
2924 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2925 goto illegal_op;
2926 if (b == 0x0e) {
2927 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2928 goto illegal_op;
2929 /* femms */
2930 tcg_gen_helper_0_0(helper_emms);
2931 return;
2932 }
2933 if (b == 0x77) {
2934 /* emms */
2935 tcg_gen_helper_0_0(helper_emms);
2936 return;
2937 }
2938 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2939 the static cpu state) */
2940 if (!is_xmm) {
2941 tcg_gen_helper_0_0(helper_enter_mmx);
2942 }
2943
2944 modrm = ldub_code(s->pc++);
2945 reg = ((modrm >> 3) & 7);
2946 if (is_xmm)
2947 reg |= rex_r;
2948 mod = (modrm >> 6) & 3;
2949 if (sse_op2 == SSE_SPECIAL) {
2950 b |= (b1 << 8);
2951 switch(b) {
2952 case 0x0e7: /* movntq */
2953 if (mod == 3)
2954 goto illegal_op;
2955 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2956 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2957 break;
2958         case 0x1e7: /* movntdq */
2959         case 0x02b: /* movntps */
2960         case 0x12b: /* movntpd */
2961             if (mod == 3)
2962                 goto illegal_op;
2963             gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2964             gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2965             break;
2966         case 0x3f0: /* lddqu: a load, unlike the movnt* stores above */
                 if (mod == 3)
                     goto illegal_op;
                 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
                 break;
2967 case 0x6e: /* movd mm, ea */
2968 #ifdef TARGET_X86_64
2969 if (s->dflag == 2) {
2970 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2971 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2972 } else
2973 #endif
2974 {
2975 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2976 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2977 offsetof(CPUX86State,fpregs[reg].mmx));
2978 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2979 }
2980 break;
2981 case 0x16e: /* movd xmm, ea */
2982 #ifdef TARGET_X86_64
2983 if (s->dflag == 2) {
2984 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2985 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2986 offsetof(CPUX86State,xmm_regs[reg]));
2987 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2988 } else
2989 #endif
2990 {
2991 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2992 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
2993 offsetof(CPUX86State,xmm_regs[reg]));
2994 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2995 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2996 }
2997 break;
2998 case 0x6f: /* movq mm, ea */
2999 if (mod != 3) {
3000 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3001 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3002 } else {
3003 rm = (modrm & 7);
3004 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3005 offsetof(CPUX86State,fpregs[rm].mmx));
3006 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3007 offsetof(CPUX86State,fpregs[reg].mmx));
3008 }
3009 break;
3010 case 0x010: /* movups */
3011 case 0x110: /* movupd */
3012 case 0x028: /* movaps */
3013 case 0x128: /* movapd */
3014 case 0x16f: /* movdqa xmm, ea */
3015 case 0x26f: /* movdqu xmm, ea */
3016 if (mod != 3) {
3017 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3018 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3019 } else {
3020 rm = (modrm & 7) | REX_B(s);
3021 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3022 offsetof(CPUX86State,xmm_regs[rm]));
3023 }
3024 break;
3025 case 0x210: /* movss xmm, ea */
3026 if (mod != 3) {
3027 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3028 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3029 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3030 gen_op_movl_T0_0();
3031 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3032 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3033 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3034 } else {
3035 rm = (modrm & 7) | REX_B(s);
3036 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3037 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3038 }
3039 break;
3040 case 0x310: /* movsd xmm, ea */
3041 if (mod != 3) {
3042 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3043 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3044 gen_op_movl_T0_0();
3045 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3046 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3047 } else {
3048 rm = (modrm & 7) | REX_B(s);
3049 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3050 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3051 }
3052 break;
3053 case 0x012: /* movlps */
3054 case 0x112: /* movlpd */
3055 if (mod != 3) {
3056 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3057 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3058 } else {
3059 /* movhlps */
3060 rm = (modrm & 7) | REX_B(s);
3061 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3062 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3063 }
3064 break;
3065 case 0x212: /* movsldup */
3066 if (mod != 3) {
3067 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3068 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3069 } else {
3070 rm = (modrm & 7) | REX_B(s);
3071 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3072 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3073 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3074 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3075 }
3076 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3077 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3078 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3079 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3080 break;
3081 case 0x312: /* movddup */
3082 if (mod != 3) {
3083 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3084 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3085 } else {
3086 rm = (modrm & 7) | REX_B(s);
3087 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3088 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3089 }
3090 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3091 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3092 break;
3093 case 0x016: /* movhps */
3094 case 0x116: /* movhpd */
3095 if (mod != 3) {
3096 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3097 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3098 } else {
3099 /* movlhps */
3100 rm = (modrm & 7) | REX_B(s);
3101 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3102 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3103 }
3104 break;
3105 case 0x216: /* movshdup */
3106 if (mod != 3) {
3107 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3108 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3109 } else {
3110 rm = (modrm & 7) | REX_B(s);
3111 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3112 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3113 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3114 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3115 }
3116 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3117 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3118 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3119 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3120 break;
3121 case 0x7e: /* movd ea, mm */
3122 #ifdef TARGET_X86_64
3123 if (s->dflag == 2) {
3124 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3125 offsetof(CPUX86State,fpregs[reg].mmx));
3126 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3127 } else
3128 #endif
3129 {
3130 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3131 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3132 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3133 }
3134 break;
3135 case 0x17e: /* movd ea, xmm */
3136 #ifdef TARGET_X86_64
3137 if (s->dflag == 2) {
3138 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3139 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3140 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3141 } else
3142 #endif
3143 {
3144 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3145 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3146 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3147 }
3148 break;
3149 case 0x27e: /* movq xmm, ea */
3150 if (mod != 3) {
3151 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3152 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3153 } else {
3154 rm = (modrm & 7) | REX_B(s);
3155 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3156 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3157 }
3158 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3159 break;
3160 case 0x7f: /* movq ea, mm */
3161 if (mod != 3) {
3162 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3163 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3164 } else {
3165 rm = (modrm & 7);
3166 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3167 offsetof(CPUX86State,fpregs[reg].mmx));
3168 }
3169 break;
3170 case 0x011: /* movups */
3171 case 0x111: /* movupd */
3172 case 0x029: /* movaps */
3173 case 0x129: /* movapd */
3174 case 0x17f: /* movdqa ea, xmm */
3175 case 0x27f: /* movdqu ea, xmm */
3176 if (mod != 3) {
3177 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3178 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3179 } else {
3180 rm = (modrm & 7) | REX_B(s);
3181 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3182 offsetof(CPUX86State,xmm_regs[reg]));
3183 }
3184 break;
3185 case 0x211: /* movss ea, xmm */
3186 if (mod != 3) {
3187 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3188 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3189 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3190 } else {
3191 rm = (modrm & 7) | REX_B(s);
3192 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3193 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3194 }
3195 break;
3196 case 0x311: /* movsd ea, xmm */
3197 if (mod != 3) {
3198 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3199 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3200 } else {
3201 rm = (modrm & 7) | REX_B(s);
3202 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3203 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3204 }
3205 break;
3206 case 0x013: /* movlps */
3207 case 0x113: /* movlpd */
3208 if (mod != 3) {
3209 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3210 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3211 } else {
3212 goto illegal_op;
3213 }
3214 break;
3215 case 0x017: /* movhps */
3216 case 0x117: /* movhpd */
3217 if (mod != 3) {
3218 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3219 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3220 } else {
3221 goto illegal_op;
3222 }
3223 break;
3224 case 0x71: /* shift mm, im */
3225 case 0x72:
3226 case 0x73:
3227 case 0x171: /* shift xmm, im */
3228 case 0x172:
3229 case 0x173:
3230 val = ldub_code(s->pc++);
3231 if (is_xmm) {
3232 gen_op_movl_T0_im(val);
3233 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3234 gen_op_movl_T0_0();
3235 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3236 op1_offset = offsetof(CPUX86State,xmm_t0);
3237 } else {
3238 gen_op_movl_T0_im(val);
3239 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3240 gen_op_movl_T0_0();
3241 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3242 op1_offset = offsetof(CPUX86State,mmx_t0);
3243 }
3244             sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3245 if (!sse_op2)
3246 goto illegal_op;
3247 if (is_xmm) {
3248 rm = (modrm & 7) | REX_B(s);
3249 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3250 } else {
3251 rm = (modrm & 7);
3252 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3253 }
3254 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3255 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3256 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3257 break;
3258 case 0x050: /* movmskps */
3259 rm = (modrm & 7) | REX_B(s);
3260 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3261 offsetof(CPUX86State,xmm_regs[rm]));
3262 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3263 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3264 gen_op_mov_reg_T0(OT_LONG, reg);
3265 break;
3266 case 0x150: /* movmskpd */
3267 rm = (modrm & 7) | REX_B(s);
3268 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3269 offsetof(CPUX86State,xmm_regs[rm]));
3270 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3271 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3272 gen_op_mov_reg_T0(OT_LONG, reg);
3273 break;
3274 case 0x02a: /* cvtpi2ps */
3275 case 0x12a: /* cvtpi2pd */
3276 tcg_gen_helper_0_0(helper_enter_mmx);
3277 if (mod != 3) {
3278 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3279 op2_offset = offsetof(CPUX86State,mmx_t0);
3280 gen_ldq_env_A0(s->mem_index, op2_offset);
3281 } else {
3282 rm = (modrm & 7);
3283 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3284 }
3285 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3286 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3287 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3288 switch(b >> 8) {
3289 case 0x0:
3290 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3291 break;
3292 default:
3293 case 0x1:
3294 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3295 break;
3296 }
3297 break;
3298 case 0x22a: /* cvtsi2ss */
3299 case 0x32a: /* cvtsi2sd */
3300 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3301 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3302 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3303 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3304             sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3305             if (ot == OT_LONG) {
3306                 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                     tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
                 } else {
                     /* cvtsq2ss/cvtsq2sd take the full 64 bit source */
                     tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
                 }
3307 break;
3308 case 0x02c: /* cvttps2pi */
3309 case 0x12c: /* cvttpd2pi */
3310 case 0x02d: /* cvtps2pi */
3311 case 0x12d: /* cvtpd2pi */
3312 tcg_gen_helper_0_0(helper_enter_mmx);
3313 if (mod != 3) {
3314 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3315 op2_offset = offsetof(CPUX86State,xmm_t0);
3316 gen_ldo_env_A0(s->mem_index, op2_offset);
3317 } else {
3318 rm = (modrm & 7) | REX_B(s);
3319 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3320 }
3321 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3322 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3323 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3324 switch(b) {
3325 case 0x02c:
3326 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3327 break;
3328 case 0x12c:
3329 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3330 break;
3331 case 0x02d:
3332 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3333 break;
3334 case 0x12d:
3335 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3336 break;
3337 }
3338 break;
3339 case 0x22c: /* cvttss2si */
3340 case 0x32c: /* cvttsd2si */
3341 case 0x22d: /* cvtss2si */
3342 case 0x32d: /* cvtsd2si */
3343 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3344 if (mod != 3) {
3345 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3346 if ((b >> 8) & 1) {
3347 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3348 } else {
3349 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3350 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3351 }
3352 op2_offset = offsetof(CPUX86State,xmm_t0);
3353 } else {
3354 rm = (modrm & 7) | REX_B(s);
3355 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3356 }
3357 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3358 (b & 1) * 4];
3359 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3360 if (ot == OT_LONG) {
3361 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3362 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3363 } else {
3364 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3365 }
3366 gen_op_mov_reg_T0(ot, reg);
3367 break;
3368 case 0xc4: /* pinsrw */
3369 case 0x1c4:
3370 s->rip_offset = 1;
3371 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3372 val = ldub_code(s->pc++);
3373 if (b1) {
3374 val &= 7;
3375 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3376 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3377 } else {
3378 val &= 3;
3379 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3380 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3381 }
3382 break;
3383 case 0xc5: /* pextrw */
3384 case 0x1c5:
3385 if (mod != 3)
3386 goto illegal_op;
3387 val = ldub_code(s->pc++);
3388 if (b1) {
3389 val &= 7;
3390 rm = (modrm & 7) | REX_B(s);
3391 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3392 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3393 } else {
3394 val &= 3;
3395 rm = (modrm & 7);
3396 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3397 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3398 }
3399 reg = ((modrm >> 3) & 7) | rex_r;
3400 gen_op_mov_reg_T0(OT_LONG, reg);
3401 break;
3402 case 0x1d6: /* movq ea, xmm */
3403 if (mod != 3) {
3404 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3405 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3406 } else {
3407 rm = (modrm & 7) | REX_B(s);
3408 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3409 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3410 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3411 }
3412 break;
3413 case 0x2d6: /* movq2dq */
3414 tcg_gen_helper_0_0(helper_enter_mmx);
3415 rm = (modrm & 7);
3416 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3417 offsetof(CPUX86State,fpregs[rm].mmx));
3418 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3419 break;
3420 case 0x3d6: /* movdq2q */
3421 tcg_gen_helper_0_0(helper_enter_mmx);
3422 rm = (modrm & 7) | REX_B(s);
3423 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3424 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3425 break;
3426 case 0xd7: /* pmovmskb */
3427 case 0x1d7:
3428 if (mod != 3)
3429 goto illegal_op;
3430 if (b1) {
3431 rm = (modrm & 7) | REX_B(s);
3432 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3433 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3434 } else {
3435 rm = (modrm & 7);
3436 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3437 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3438 }
3439 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3440 reg = ((modrm >> 3) & 7) | rex_r;
3441 gen_op_mov_reg_T0(OT_LONG, reg);
3442 break;
3443 default:
3444 goto illegal_op;
3445 }
3446 } else {
3447 /* generic MMX or SSE operation */
3448 switch(b) {
3449 case 0x70: /* pshufx insn */
3450 case 0xc6: /* pshufx insn */
3451 case 0xc2: /* compare insns */
3452 s->rip_offset = 1;
3453 break;
3454 default:
3455 break;
3456 }
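             /* op1 is always a CPU register; op2 is either another register
                or a temporary (xmm_t0/mmx_t0) filled from memory */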
3457 if (is_xmm) {
3458 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3459 if (mod != 3) {
3460 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3461 op2_offset = offsetof(CPUX86State,xmm_t0);
3462 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3463 b == 0xc2)) {
3464 /* specific case for SSE single instructions */
3465 if (b1 == 2) {
3466 /* 32 bit access */
3467 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3468 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3469 } else {
3470 /* 64 bit access */
3471 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3472 }
3473 } else {
3474 gen_ldo_env_A0(s->mem_index, op2_offset);
3475 }
3476 } else {
3477 rm = (modrm & 7) | REX_B(s);
3478 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3479 }
3480 } else {
3481 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3482 if (mod != 3) {
3483 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3484 op2_offset = offsetof(CPUX86State,mmx_t0);
3485 gen_ldq_env_A0(s->mem_index, op2_offset);
3486 } else {
3487 rm = (modrm & 7);
3488 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3489 }
3490 }
3491 switch(b) {
3492 case 0x0f: /* 3DNow! data insns */
3493 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3494 goto illegal_op;
3495 val = ldub_code(s->pc++);
3496 sse_op2 = sse_op_table5[val];
3497 if (!sse_op2)
3498 goto illegal_op;
3499 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3500 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3501 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3502 break;
3503 case 0x70: /* pshufx insn */
3504 case 0xc6: /* pshufx insn */
3505 val = ldub_code(s->pc++);
3506 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3507 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3508 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3509 break;
3510 case 0xc2:
3511 /* compare insns */
3512 val = ldub_code(s->pc++);
3513 if (val >= 8)
3514 goto illegal_op;
3515 sse_op2 = sse_op_table4[val][b1];
3516 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3517 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3518 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3519 break;
3520 case 0xf7:
3521             /* maskmov: we must prepare A0 by hand (implicit DS:rDI operand) */
3522 if (mod != 3)
3523 goto illegal_op;
3524 #ifdef TARGET_X86_64
3525 if (s->aflag == 2) {
3526 gen_op_movq_A0_reg(R_EDI);
3527 } else
3528 #endif
3529 {
3530 gen_op_movl_A0_reg(R_EDI);
3531 if (s->aflag == 0)
3532 gen_op_andl_A0_ffff();
3533 }
3534 gen_add_A0_ds_seg(s);
3535
3536 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3537 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3538 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3539 break;
3540 default:
3541 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3542 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3543 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3544 break;
3545 }
3546 if (b == 0x2e || b == 0x2f) {
3547 /* just to keep the EFLAGS optimization correct */
3548 gen_op_com_dummy();
3549 s->cc_op = CC_OP_EFLAGS;
3550 }
3551 }
3552 }
3553
3554 /* convert one instruction. s->is_jmp is set if the translation must
3555 be stopped. Return the next pc value */
3556 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3557 {
3558 int b, prefixes, aflag, dflag;
3559 int shift, ot;
3560 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3561 target_ulong next_eip, tval;
3562 int rex_w, rex_r;
3563
3564 s->pc = pc_start;
3565 prefixes = 0;
3566 aflag = s->code32;
3567 dflag = s->code32;
3568 s->override = -1;
3569 rex_w = -1;
3570 rex_r = 0;
3571 #ifdef TARGET_X86_64
3572 s->rex_x = 0;
3573 s->rex_b = 0;
3574 x86_64_hregs = 0;
3575 #endif
3576 s->rip_offset = 0; /* for relative ip address */
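         /* the decoder loops here until a non-prefix byte is found */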
3577 next_byte:
3578 b = ldub_code(s->pc);
3579 s->pc++;
3580 /* check prefixes */
3581 #ifdef TARGET_X86_64
3582 if (CODE64(s)) {
3583 switch (b) {
3584 case 0xf3:
3585 prefixes |= PREFIX_REPZ;
3586 goto next_byte;
3587 case 0xf2:
3588 prefixes |= PREFIX_REPNZ;
3589 goto next_byte;
3590 case 0xf0:
3591 prefixes |= PREFIX_LOCK;
3592 goto next_byte;
3593 case 0x2e:
3594 s->override = R_CS;
3595 goto next_byte;
3596 case 0x36:
3597 s->override = R_SS;
3598 goto next_byte;
3599 case 0x3e:
3600 s->override = R_DS;
3601 goto next_byte;
3602 case 0x26:
3603 s->override = R_ES;
3604 goto next_byte;
3605 case 0x64:
3606 s->override = R_FS;
3607 goto next_byte;
3608 case 0x65:
3609 s->override = R_GS;
3610 goto next_byte;
3611 case 0x66:
3612 prefixes |= PREFIX_DATA;
3613 goto next_byte;
3614 case 0x67:
3615 prefixes |= PREFIX_ADR;
3616 goto next_byte;
3617 case 0x40 ... 0x4f:
3618 /* REX prefix */
3619 rex_w = (b >> 3) & 1;
3620 rex_r = (b & 0x4) << 1;
3621 s->rex_x = (b & 0x2) << 2;
3622 REX_B(s) = (b & 0x1) << 3;
3623 x86_64_hregs = 1; /* select uniform byte register addressing */
3624 goto next_byte;
3625 }
3626 if (rex_w == 1) {
3627 /* 0x66 is ignored if rex.w is set */
3628 dflag = 2;
3629 } else {
3630 if (prefixes & PREFIX_DATA)
3631 dflag ^= 1;
3632 }
3633 if (!(prefixes & PREFIX_ADR))
3634 aflag = 2;
3635 } else
3636 #endif
3637 {
3638 switch (b) {
3639 case 0xf3:
3640 prefixes |= PREFIX_REPZ;
3641 goto next_byte;
3642 case 0xf2:
3643 prefixes |= PREFIX_REPNZ;
3644 goto next_byte;
3645 case 0xf0:
3646 prefixes |= PREFIX_LOCK;
3647 goto next_byte;
3648 case 0x2e:
3649 s->override = R_CS;
3650 goto next_byte;
3651 case 0x36:
3652 s->override = R_SS;
3653 goto next_byte;
3654 case 0x3e:
3655 s->override = R_DS;
3656 goto next_byte;
3657 case 0x26:
3658 s->override = R_ES;
3659 goto next_byte;
3660 case 0x64:
3661 s->override = R_FS;
3662 goto next_byte;
3663 case 0x65:
3664 s->override = R_GS;
3665 goto next_byte;
3666 case 0x66:
3667 prefixes |= PREFIX_DATA;
3668 goto next_byte;
3669 case 0x67:
3670 prefixes |= PREFIX_ADR;
3671 goto next_byte;
3672 }
3673 if (prefixes & PREFIX_DATA)
3674 dflag ^= 1;
3675 if (prefixes & PREFIX_ADR)
3676 aflag ^= 1;
3677 }
3678
3679 s->prefix = prefixes;
3680 s->aflag = aflag;
3681 s->dflag = dflag;
3682
3683 /* lock generation */
3684 if (prefixes & PREFIX_LOCK)
3685 tcg_gen_helper_0_0(helper_lock);
3686
3687 /* now check op code */
3688 reswitch:
3689 switch(b) {
3690 case 0x0f:
3691 /**************************/
3692 /* extended op code */
3693 b = ldub_code(s->pc++) | 0x100;
3694 goto reswitch;
3695
3696 /**************************/
3697 /* arith & logic */
3698 case 0x00 ... 0x05:
3699 case 0x08 ... 0x0d:
3700 case 0x10 ... 0x15:
3701 case 0x18 ... 0x1d:
3702 case 0x20 ... 0x25:
3703 case 0x28 ... 0x2d:
3704 case 0x30 ... 0x35:
3705 case 0x38 ... 0x3d:
3706 {
3707 int op, f, val;
3708 op = (b >> 3) & 7;
3709 f = (b >> 1) & 3;
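                     /* op selects the ALU operation; f selects the form:
                        0 = Ev,Gv  1 = Gv,Ev  2 = A,Iv */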
3710
3711 if ((b & 1) == 0)
3712 ot = OT_BYTE;
3713 else
3714 ot = dflag + OT_WORD;
3715
3716 switch(f) {
3717 case 0: /* OP Ev, Gv */
3718 modrm = ldub_code(s->pc++);
3719 reg = ((modrm >> 3) & 7) | rex_r;
3720 mod = (modrm >> 6) & 3;
3721 rm = (modrm & 7) | REX_B(s);
3722 if (mod != 3) {
3723 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3724 opreg = OR_TMP0;
3725 } else if (op == OP_XORL && rm == reg) {
3726 xor_zero:
3727 /* xor reg, reg optimisation */
3728 gen_op_movl_T0_0();
3729 s->cc_op = CC_OP_LOGICB + ot;
3730 gen_op_mov_reg_T0(ot, reg);
3731 gen_op_update1_cc();
3732 break;
3733 } else {
3734 opreg = rm;
3735 }
3736 gen_op_mov_TN_reg(ot, 1, reg);
3737 gen_op(s, op, ot, opreg);
3738 break;
3739 case 1: /* OP Gv, Ev */
3740 modrm = ldub_code(s->pc++);
3741 mod = (modrm >> 6) & 3;
3742 reg = ((modrm >> 3) & 7) | rex_r;
3743 rm = (modrm & 7) | REX_B(s);
3744 if (mod != 3) {
3745 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3746 gen_op_ld_T1_A0(ot + s->mem_index);
3747 } else if (op == OP_XORL && rm == reg) {
3748 goto xor_zero;
3749 } else {
3750 gen_op_mov_TN_reg(ot, 1, rm);
3751 }
3752 gen_op(s, op, ot, reg);
3753 break;
3754 case 2: /* OP A, Iv */
3755 val = insn_get(s, ot);
3756 gen_op_movl_T1_im(val);
3757 gen_op(s, op, ot, OR_EAX);
3758 break;
3759 }
3760 }
3761 break;
3762
3763 case 0x80: /* GRP1 */
3764 case 0x81:
3765 case 0x82:
3766 case 0x83:
3767 {
3768 int val;
3769
3770 if ((b & 1) == 0)
3771 ot = OT_BYTE;
3772 else
3773 ot = dflag + OT_WORD;
3774
3775 modrm = ldub_code(s->pc++);
3776 mod = (modrm >> 6) & 3;
3777 rm = (modrm & 7) | REX_B(s);
3778 op = (modrm >> 3) & 7;
3779
3780 if (mod != 3) {
3781 if (b == 0x83)
3782 s->rip_offset = 1;
3783 else
3784 s->rip_offset = insn_const_size(ot);
3785 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3786 opreg = OR_TMP0;
3787 } else {
3788 opreg = rm;
3789 }
3790
3791 switch(b) {
3792 default:
3793 case 0x80:
3794 case 0x81:
3795 case 0x82:
3796 val = insn_get(s, ot);
3797 break;
3798 case 0x83:
3799 val = (int8_t)insn_get(s, OT_BYTE);
3800 break;
3801 }
3802 gen_op_movl_T1_im(val);
3803 gen_op(s, op, ot, opreg);
3804 }
3805 break;
3806
3807 /**************************/
3808 /* inc, dec, and other misc arith */
3809 case 0x40 ... 0x47: /* inc Gv */
3810 ot = dflag ? OT_LONG : OT_WORD;
3811 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3812 break;
3813 case 0x48 ... 0x4f: /* dec Gv */
3814 ot = dflag ? OT_LONG : OT_WORD;
3815 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3816 break;
3817 case 0xf6: /* GRP3 */
3818 case 0xf7:
3819 if ((b & 1) == 0)
3820 ot = OT_BYTE;
3821 else
3822 ot = dflag + OT_WORD;
3823
3824 modrm = ldub_code(s->pc++);
3825 mod = (modrm >> 6) & 3;
3826 rm = (modrm & 7) | REX_B(s);
3827 op = (modrm >> 3) & 7;
3828 if (mod != 3) {
3829 if (op == 0)
3830 s->rip_offset = insn_const_size(ot);
3831 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3832 gen_op_ld_T0_A0(ot + s->mem_index);
3833 } else {
3834 gen_op_mov_TN_reg(ot, 0, rm);
3835 }
3836
3837 switch(op) {
3838 case 0: /* test */
3839 val = insn_get(s, ot);
3840 gen_op_movl_T1_im(val);
3841 gen_op_testl_T0_T1_cc();
3842 s->cc_op = CC_OP_LOGICB + ot;
3843 break;
3844 case 2: /* not */
3845 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3846 if (mod != 3) {
3847 gen_op_st_T0_A0(ot + s->mem_index);
3848 } else {
3849 gen_op_mov_reg_T0(ot, rm);
3850 }
3851 break;
3852 case 3: /* neg */
3853 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3854 if (mod != 3) {
3855 gen_op_st_T0_A0(ot + s->mem_index);
3856 } else {
3857 gen_op_mov_reg_T0(ot, rm);
3858 }
3859 gen_op_update_neg_cc();
3860 s->cc_op = CC_OP_SUBB + ot;
3861 break;
3862 case 4: /* mul */
3863 switch(ot) {
3864 case OT_BYTE:
3865 gen_op_mulb_AL_T0();
3866 s->cc_op = CC_OP_MULB;
3867 break;
3868 case OT_WORD:
3869 gen_op_mulw_AX_T0();
3870 s->cc_op = CC_OP_MULW;
3871 break;
3872 default:
3873 case OT_LONG:
3874 gen_op_mull_EAX_T0();
3875 s->cc_op = CC_OP_MULL;
3876 break;
3877 #ifdef TARGET_X86_64
3878 case OT_QUAD:
3879 gen_op_mulq_EAX_T0();
3880 s->cc_op = CC_OP_MULQ;
3881 break;
3882 #endif
3883 }
3884 break;
3885 case 5: /* imul */
3886 switch(ot) {
3887 case OT_BYTE:
3888 gen_op_imulb_AL_T0();
3889 s->cc_op = CC_OP_MULB;
3890 break;
3891 case OT_WORD:
3892 gen_op_imulw_AX_T0();
3893 s->cc_op = CC_OP_MULW;
3894 break;
3895 default:
3896 case OT_LONG:
3897 gen_op_imull_EAX_T0();
3898 s->cc_op = CC_OP_MULL;
3899 break;
3900 #ifdef TARGET_X86_64
3901 case OT_QUAD:
3902 gen_op_imulq_EAX_T0();
3903 s->cc_op = CC_OP_MULQ;
3904 break;
3905 #endif
3906 }
3907 break;
3908 case 6: /* div */
3909 switch(ot) {
3910 case OT_BYTE:
3911 gen_jmp_im(pc_start - s->cs_base);
3912 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3913 break;
3914 case OT_WORD:
3915 gen_jmp_im(pc_start - s->cs_base);
3916 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3917 break;
3918 default:
3919 case OT_LONG:
3920 gen_jmp_im(pc_start - s->cs_base);
3921 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3922 break;
3923 #ifdef TARGET_X86_64
3924 case OT_QUAD:
3925 gen_jmp_im(pc_start - s->cs_base);
3926 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3927 break;
3928 #endif
3929 }
3930 break;
3931 case 7: /* idiv */
3932 switch(ot) {
3933 case OT_BYTE:
3934 gen_jmp_im(pc_start - s->cs_base);
3935 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3936 break;
3937 case OT_WORD:
3938 gen_jmp_im(pc_start - s->cs_base);
3939 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3940 break;
3941 default:
3942 case OT_LONG:
3943 gen_jmp_im(pc_start - s->cs_base);
3944 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3945 break;
3946 #ifdef TARGET_X86_64
3947 case OT_QUAD:
3948 gen_jmp_im(pc_start - s->cs_base);
3949 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3950 break;
3951 #endif
3952 }
3953 break;
3954 default:
3955 goto illegal_op;
3956 }
3957 break;
3958
3959 case 0xfe: /* GRP4 */
3960 case 0xff: /* GRP5 */
3961 if ((b & 1) == 0)
3962 ot = OT_BYTE;
3963 else
3964 ot = dflag + OT_WORD;
3965
3966 modrm = ldub_code(s->pc++);
3967 mod = (modrm >> 6) & 3;
3968 rm = (modrm & 7) | REX_B(s);
3969 op = (modrm >> 3) & 7;
3970 if (op >= 2 && b == 0xfe) {
3971 goto illegal_op;
3972 }
3973 if (CODE64(s)) {
3974             if (op == 2 || op == 4) {
3975                 /* operand size for calls and jumps is 64 bit */
3976                 ot = OT_QUAD;
3977             } else if (op == 3 || op == 5) {
3978                 /* for far calls and jumps, the operand is 16 or 32 bit,
3979                    even in long mode */
3980                 ot = dflag ? OT_LONG : OT_WORD;
3981 } else if (op == 6) {
3982 /* default push size is 64 bit */
3983 ot = dflag ? OT_QUAD : OT_WORD;
3984 }
3985 }
3986 if (mod != 3) {
3987 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3988 if (op >= 2 && op != 3 && op != 5)
3989 gen_op_ld_T0_A0(ot + s->mem_index);
3990 } else {
3991 gen_op_mov_TN_reg(ot, 0, rm);
3992 }
3993
3994 switch(op) {
3995 case 0: /* inc Ev */
3996 if (mod != 3)
3997 opreg = OR_TMP0;
3998 else
3999 opreg = rm;
4000 gen_inc(s, ot, opreg, 1);
4001 break;
4002 case 1: /* dec Ev */
4003 if (mod != 3)
4004 opreg = OR_TMP0;
4005 else
4006 opreg = rm;
4007 gen_inc(s, ot, opreg, -1);
4008 break;
4009 case 2: /* call Ev */
4010 /* XXX: optimize if memory (no 'and' is necessary) */
4011 if (s->dflag == 0)
4012 gen_op_andl_T0_ffff();
4013 next_eip = s->pc - s->cs_base;
4014 gen_movtl_T1_im(next_eip);
4015 gen_push_T1(s);
4016 gen_op_jmp_T0();
4017 gen_eob(s);
4018 break;
4019 case 3: /* lcall Ev */
4020 gen_op_ld_T1_A0(ot + s->mem_index);
4021 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4022 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4023 do_lcall:
4024 if (s->pe && !s->vm86) {
4025 if (s->cc_op != CC_OP_DYNAMIC)
4026 gen_op_set_cc_op(s->cc_op);
4027 gen_jmp_im(pc_start - s->cs_base);
4028 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4029 tcg_gen_helper_0_4(helper_lcall_protected,
4030 cpu_tmp2_i32, cpu_T[1],
4031 tcg_const_i32(dflag),
4032 tcg_const_i32(s->pc - pc_start));
4033 } else {
4034 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4035 tcg_gen_helper_0_4(helper_lcall_real,
4036 cpu_tmp2_i32, cpu_T[1],
4037 tcg_const_i32(dflag),
4038 tcg_const_i32(s->pc - s->cs_base));
4039 }
4040 gen_eob(s);
4041 break;
4042 case 4: /* jmp Ev */
4043 if (s->dflag == 0)
4044 gen_op_andl_T0_ffff();
4045 gen_op_jmp_T0();
4046 gen_eob(s);
4047 break;
4048 case 5: /* ljmp Ev */
4049 gen_op_ld_T1_A0(ot + s->mem_index);
4050 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4051 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4052 do_ljmp:
4053 if (s->pe && !s->vm86) {
4054 if (s->cc_op != CC_OP_DYNAMIC)
4055 gen_op_set_cc_op(s->cc_op);
4056 gen_jmp_im(pc_start - s->cs_base);
4057 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4058 tcg_gen_helper_0_3(helper_ljmp_protected,
4059 cpu_tmp2_i32,
4060 cpu_T[1],
4061 tcg_const_i32(s->pc - pc_start));
4062 } else {
4063 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4064 gen_op_movl_T0_T1();
4065 gen_op_jmp_T0();
4066 }
4067 gen_eob(s);
4068 break;
4069 case 6: /* push Ev */
4070 gen_push_T0(s);
4071 break;
4072 default:
4073 goto illegal_op;
4074 }
4075 break;
4076
4077 case 0x84: /* test Ev, Gv */
4078 case 0x85:
4079 if ((b & 1) == 0)
4080 ot = OT_BYTE;
4081 else
4082 ot = dflag + OT_WORD;
4083
4084 modrm = ldub_code(s->pc++);
4085 mod = (modrm >> 6) & 3;
4086 rm = (modrm & 7) | REX_B(s);
4087 reg = ((modrm >> 3) & 7) | rex_r;
4088
4089 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4090 gen_op_mov_TN_reg(ot, 1, reg);
4091 gen_op_testl_T0_T1_cc();
4092 s->cc_op = CC_OP_LOGICB + ot;
4093 break;
4094
4095 case 0xa8: /* test eAX, Iv */
4096 case 0xa9:
4097 if ((b & 1) == 0)
4098 ot = OT_BYTE;
4099 else
4100 ot = dflag + OT_WORD;
4101 val = insn_get(s, ot);
4102
4103 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4104 gen_op_movl_T1_im(val);
4105 gen_op_testl_T0_T1_cc();
4106 s->cc_op = CC_OP_LOGICB + ot;
4107 break;
4108
4109 case 0x98: /* CWDE/CBW */
4110 #ifdef TARGET_X86_64
4111 if (dflag == 2) {
4112 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4113 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4114 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4115 } else
4116 #endif
4117 if (dflag == 1) {
4118 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4119 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4120 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4121 } else {
4122 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4123 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4124 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4125 }
4126 break;
4127 case 0x99: /* CDQ/CWD */
4128 #ifdef TARGET_X86_64
4129 if (dflag == 2) {
4130 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4131 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4132 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4133 } else
4134 #endif
4135 if (dflag == 1) {
4136 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4137 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4138 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4139 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4140 } else {
4141 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4142 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4143 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4144 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4145 }
4146 break;
4147 case 0x1af: /* imul Gv, Ev */
4148 case 0x69: /* imul Gv, Ev, I */
4149 case 0x6b:
4150 ot = dflag + OT_WORD;
4151 modrm = ldub_code(s->pc++);
4152 reg = ((modrm >> 3) & 7) | rex_r;
4153 if (b == 0x69)
4154 s->rip_offset = insn_const_size(ot);
4155 else if (b == 0x6b)
4156 s->rip_offset = 1;
4157 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4158 if (b == 0x69) {
4159 val = insn_get(s, ot);
4160 gen_op_movl_T1_im(val);
4161 } else if (b == 0x6b) {
4162 val = (int8_t)insn_get(s, OT_BYTE);
4163 gen_op_movl_T1_im(val);
4164 } else {
4165 gen_op_mov_TN_reg(ot, 1, reg);
4166 }
4167
4168 #ifdef TARGET_X86_64
4169 if (ot == OT_QUAD) {
4170 gen_op_imulq_T0_T1();
4171 } else
4172 #endif
4173 if (ot == OT_LONG) {
4174 gen_op_imull_T0_T1();
4175 } else {
4176 gen_op_imulw_T0_T1();
4177 }
4178 gen_op_mov_reg_T0(ot, reg);
4179 s->cc_op = CC_OP_MULB + ot;
4180 break;
4181 case 0x1c0:
4182 case 0x1c1: /* xadd Ev, Gv */
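/* xadd: the destination receives source + destination and the source
   register receives the old destination value, hence the swapped
   T0/T1 write-back below. */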
4183 if ((b & 1) == 0)
4184 ot = OT_BYTE;
4185 else
4186 ot = dflag + OT_WORD;
4187 modrm = ldub_code(s->pc++);
4188 reg = ((modrm >> 3) & 7) | rex_r;
4189 mod = (modrm >> 6) & 3;
4190 if (mod == 3) {
4191 rm = (modrm & 7) | REX_B(s);
4192 gen_op_mov_TN_reg(ot, 0, reg);
4193 gen_op_mov_TN_reg(ot, 1, rm);
4194 gen_op_addl_T0_T1();
4195 gen_op_mov_reg_T1(ot, reg);
4196 gen_op_mov_reg_T0(ot, rm);
4197 } else {
4198 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4199 gen_op_mov_TN_reg(ot, 0, reg);
4200 gen_op_ld_T1_A0(ot + s->mem_index);
4201 gen_op_addl_T0_T1();
4202 gen_op_st_T0_A0(ot + s->mem_index);
4203 gen_op_mov_reg_T1(ot, reg);
4204 }
4205 gen_op_update2_cc();
4206 s->cc_op = CC_OP_ADDB + ot;
4207 break;
4208 case 0x1b0:
4209 case 0x1b1: /* cmpxchg Ev, Gv */
4210 {
4211 int label1;
4212
4213 if ((b & 1) == 0)
4214 ot = OT_BYTE;
4215 else
4216 ot = dflag + OT_WORD;
4217 modrm = ldub_code(s->pc++);
4218 reg = ((modrm >> 3) & 7) | rex_r;
4219 mod = (modrm >> 6) & 3;
4220 gen_op_mov_TN_reg(ot, 1, reg);
4221 if (mod == 3) {
4222 rm = (modrm & 7) | REX_B(s);
4223 gen_op_mov_TN_reg(ot, 0, rm);
4224 } else {
4225 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4226 gen_op_ld_T0_A0(ot + s->mem_index);
4227 rm = 0; /* avoid warning */
4228 }
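/* cmpxchg without an atomic TCG op: T3 = EAX - dest decides the branch.
   If they differ, the old value is written back unchanged and also
   loaded into EAX; if they match, the new value from 'reg' (already in
   T1) is stored. Flags come from the implied subtraction via
   cc_src/cc_dst. */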
4229 label1 = gen_new_label();
4230 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4231 tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4232 gen_extu(ot, cpu_T3);
4233 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4234 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4235 gen_op_mov_reg_T0(ot, R_EAX);
4236 gen_set_label(label1);
4237 if (mod == 3) {
4238 gen_op_mov_reg_T1(ot, rm);
4239 } else {
4240 gen_op_st_T1_A0(ot + s->mem_index);
4241 }
4242 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4243 tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4244 s->cc_op = CC_OP_SUBB + ot;
4245 }
4246 break;
4247 case 0x1c7: /* cmpxchg8b */
4248 modrm = ldub_code(s->pc++);
4249 mod = (modrm >> 6) & 3;
4250 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4251 goto illegal_op;
4252 gen_jmp_im(pc_start - s->cs_base);
4253 if (s->cc_op != CC_OP_DYNAMIC)
4254 gen_op_set_cc_op(s->cc_op);
4255 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4256 gen_op_cmpxchg8b();
4257 s->cc_op = CC_OP_EFLAGS;
4258 break;
4259
4260 /**************************/
4261 /* push/pop */
4262 case 0x50 ... 0x57: /* push */
4263 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4264 gen_push_T0(s);
4265 break;
4266 case 0x58 ... 0x5f: /* pop */
4267 if (CODE64(s)) {
4268 ot = dflag ? OT_QUAD : OT_WORD;
4269 } else {
4270 ot = dflag + OT_WORD;
4271 }
4272 gen_pop_T0(s);
4273 /* NOTE: order is important for pop %sp */
4274 gen_pop_update(s);
4275 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4276 break;
4277 case 0x60: /* pusha */
4278 if (CODE64(s))
4279 goto illegal_op;
4280 gen_pusha(s);
4281 break;
4282 case 0x61: /* popa */
4283 if (CODE64(s))
4284 goto illegal_op;
4285 gen_popa(s);
4286 break;
4287 case 0x68: /* push Iv */
4288 case 0x6a:
4289 if (CODE64(s)) {
4290 ot = dflag ? OT_QUAD : OT_WORD;
4291 } else {
4292 ot = dflag + OT_WORD;
4293 }
4294 if (b == 0x68)
4295 val = insn_get(s, ot);
4296 else
4297 val = (int8_t)insn_get(s, OT_BYTE);
4298 gen_op_movl_T0_im(val);
4299 gen_push_T0(s);
4300 break;
4301 case 0x8f: /* pop Ev */
4302 if (CODE64(s)) {
4303 ot = dflag ? OT_QUAD : OT_WORD;
4304 } else {
4305 ot = dflag + OT_WORD;
4306 }
4307 modrm = ldub_code(s->pc++);
4308 mod = (modrm >> 6) & 3;
4309 gen_pop_T0(s);
4310 if (mod == 3) {
4311 /* NOTE: order is important for pop %sp */
4312 gen_pop_update(s);
4313 rm = (modrm & 7) | REX_B(s);
4314 gen_op_mov_reg_T0(ot, rm);
4315 } else {
4316 /* NOTE: order is important too for MMU exceptions */
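/* a pop with an ESP-based memory operand must see ESP as already
   incremented; popl_esp_hack makes gen_lea_modrm add the pop size to
   such addresses before gen_pop_update runs. */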
4317 s->popl_esp_hack = 1 << ot;
4318 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4319 s->popl_esp_hack = 0;
4320 gen_pop_update(s);
4321 }
4322 break;
4323 case 0xc8: /* enter */
4324 {
4325 int level;
4326 val = lduw_code(s->pc);
4327 s->pc += 2;
4328 level = ldub_code(s->pc++);
4329 gen_enter(s, val, level);
4330 }
4331 break;
4332 case 0xc9: /* leave */
4333 /* XXX: exception not precise (ESP is updated before potential exception) */
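/* leave: copy (E)BP to (E)SP using the stack-segment width, then pop
   (E)BP with the operand-size width. */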
4334 if (CODE64(s)) {
4335 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4336 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4337 } else if (s->ss32) {
4338 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4339 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4340 } else {
4341 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4342 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4343 }
4344 gen_pop_T0(s);
4345 if (CODE64(s)) {
4346 ot = dflag ? OT_QUAD : OT_WORD;
4347 } else {
4348 ot = dflag + OT_WORD;
4349 }
4350 gen_op_mov_reg_T0(ot, R_EBP);
4351 gen_pop_update(s);
4352 break;
4353 case 0x06: /* push es */
4354 case 0x0e: /* push cs */
4355 case 0x16: /* push ss */
4356 case 0x1e: /* push ds */
4357 if (CODE64(s))
4358 goto illegal_op;
4359 gen_op_movl_T0_seg(b >> 3);
4360 gen_push_T0(s);
4361 break;
4362 case 0x1a0: /* push fs */
4363 case 0x1a8: /* push gs */
4364 gen_op_movl_T0_seg((b >> 3) & 7);
4365 gen_push_T0(s);
4366 break;
4367 case 0x07: /* pop es */
4368 case 0x17: /* pop ss */
4369 case 0x1f: /* pop ds */
4370 if (CODE64(s))
4371 goto illegal_op;
4372 reg = b >> 3;
4373 gen_pop_T0(s);
4374 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4375 gen_pop_update(s);
4376 if (reg == R_SS) {
4377 /* if reg == SS, inhibit interrupts/trace */
4378 /* if several consecutive instructions disable interrupts,
4379 only the _first_ one takes effect */
4380 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4381 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4382 s->tf = 0;
4383 }
4384 if (s->is_jmp) {
4385 gen_jmp_im(s->pc - s->cs_base);
4386 gen_eob(s);
4387 }
4388 break;
4389 case 0x1a1: /* pop fs */
4390 case 0x1a9: /* pop gs */
4391 gen_pop_T0(s);
4392 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4393 gen_pop_update(s);
4394 if (s->is_jmp) {
4395 gen_jmp_im(s->pc - s->cs_base);
4396 gen_eob(s);
4397 }
4398 break;
4399
4400 /**************************/
4401 /* mov */
4402 case 0x88:
4403 case 0x89: /* mov Gv, Ev */
4404 if ((b & 1) == 0)
4405 ot = OT_BYTE;
4406 else
4407 ot = dflag + OT_WORD;
4408 modrm = ldub_code(s->pc++);
4409 reg = ((modrm >> 3) & 7) | rex_r;
4410
4411 /* generate a generic store */
4412 gen_ldst_modrm(s, modrm, ot, reg, 1);
4413 break;
4414 case 0xc6:
4415 case 0xc7: /* mov Ev, Iv */
4416 if ((b & 1) == 0)
4417 ot = OT_BYTE;
4418 else
4419 ot = dflag + OT_WORD;
4420 modrm = ldub_code(s->pc++);
4421 mod = (modrm >> 6) & 3;
4422 if (mod != 3) {
4423 s->rip_offset = insn_const_size(ot);
4424 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4425 }
4426 val = insn_get(s, ot);
4427 gen_op_movl_T0_im(val);
4428 if (mod != 3)
4429 gen_op_st_T0_A0(ot + s->mem_index);
4430 else
4431 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4432 break;
4433 case 0x8a:
4434 case 0x8b: /* mov Ev, Gv */
4435 if ((b & 1) == 0)
4436 ot = OT_BYTE;
4437 else
4438 ot = OT_WORD + dflag;
4439 modrm = ldub_code(s->pc++);
4440 reg = ((modrm >> 3) & 7) | rex_r;
4441
4442 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4443 gen_op_mov_reg_T0(ot, reg);
4444 break;
4445 case 0x8e: /* mov seg, Gv */
4446 modrm = ldub_code(s->pc++);
4447 reg = (modrm >> 3) & 7;
4448 if (reg >= 6 || reg == R_CS)
4449 goto illegal_op;
4450 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4451 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4452 if (reg == R_SS) {
4453 /* if reg == SS, inhibit interrupts/trace */
4454 /* if several consecutive instructions disable interrupts,
4455 only the _first_ one takes effect */
4456 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4457 tcg_gen_helper_0_0(helper_set_inhibit_irq);
4458 s->tf = 0;
4459 }
4460 if (s->is_jmp) {
4461 gen_jmp_im(s->pc - s->cs_base);
4462 gen_eob(s);
4463 }
4464 break;
4465 case 0x8c: /* mov Gv, seg */
4466 modrm = ldub_code(s->pc++);
4467 reg = (modrm >> 3) & 7;
4468 mod = (modrm >> 6) & 3;
4469 if (reg >= 6)
4470 goto illegal_op;
4471 gen_op_movl_T0_seg(reg);
4472 if (mod == 3)
4473 ot = OT_WORD + dflag;
4474 else
4475 ot = OT_WORD;
4476 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4477 break;
4478
4479 case 0x1b6: /* movzbS Gv, Eb */
4480 case 0x1b7: /* movzwS Gv, Ew */
4481 case 0x1be: /* movsbS Gv, Eb */
4482 case 0x1bf: /* movswS Gv, Ew */
4483 {
4484 int d_ot;
4485 /* d_ot is the size of the destination */
4486 d_ot = dflag + OT_WORD;
4487 /* ot is the size of the source */
4488 ot = (b & 1) + OT_BYTE;
4489 modrm = ldub_code(s->pc++);
4490 reg = ((modrm >> 3) & 7) | rex_r;
4491 mod = (modrm >> 6) & 3;
4492 rm = (modrm & 7) | REX_B(s);
4493
4494 if (mod == 3) {
4495 gen_op_mov_TN_reg(ot, 0, rm);
4496 switch(ot | (b & 8)) {
4497 case OT_BYTE:
4498 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4499 break;
4500 case OT_BYTE | 8:
4501 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4502 break;
4503 case OT_WORD:
4504 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4505 break;
4506 default:
4507 case OT_WORD | 8:
4508 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4509 break;
4510 }
4511 gen_op_mov_reg_T0(d_ot, reg);
4512 } else {
4513 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4514 if (b & 8) {
4515 gen_op_lds_T0_A0(ot + s->mem_index);
4516 } else {
4517 gen_op_ldu_T0_A0(ot + s->mem_index);
4518 }
4519 gen_op_mov_reg_T0(d_ot, reg);
4520 }
4521 }
4522 break;
4523
4524 case 0x8d: /* lea */
4525 ot = dflag + OT_WORD;
4526 modrm = ldub_code(s->pc++);
4527 mod = (modrm >> 6) & 3;
4528 if (mod == 3)
4529 goto illegal_op;
4530 reg = ((modrm >> 3) & 7) | rex_r;
4531 /* we must ensure that no segment is added */
4532 s->override = -1;
4533 val = s->addseg;
4534 s->addseg = 0;
4535 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4536 s->addseg = val;
4537 gen_op_mov_reg_A0(ot - OT_WORD, reg);
4538 break;
4539
4540 case 0xa0: /* mov EAX, Ov */
4541 case 0xa1:
4542 case 0xa2: /* mov Ov, EAX */
4543 case 0xa3:
4544 {
4545 target_ulong offset_addr;
4546
4547 if ((b & 1) == 0)
4548 ot = OT_BYTE;
4549 else
4550 ot = dflag + OT_WORD;
4551 #ifdef TARGET_X86_64
4552 if (s->aflag == 2) {
4553 offset_addr = ldq_code(s->pc);
4554 s->pc += 8;
4555 gen_op_movq_A0_im(offset_addr);
4556 } else
4557 #endif
4558 {
4559 if (s->aflag) {
4560 offset_addr = insn_get(s, OT_LONG);
4561 } else {
4562 offset_addr = insn_get(s, OT_WORD);
4563 }
4564 gen_op_movl_A0_im(offset_addr);
4565 }
4566 gen_add_A0_ds_seg(s);
4567 if ((b & 2) == 0) {
4568 gen_op_ld_T0_A0(ot + s->mem_index);
4569 gen_op_mov_reg_T0(ot, R_EAX);
4570 } else {
4571 gen_op_mov_TN_reg(ot, 0, R_EAX);
4572 gen_op_st_T0_A0(ot + s->mem_index);
4573 }
4574 }
4575 break;
4576 case 0xd7: /* xlat */
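/* xlat: AL = [seg:(E)BX + zero-extended AL]; the address is then
   truncated to the current address size. */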
4577 #ifdef TARGET_X86_64
4578 if (s->aflag == 2) {
4579 gen_op_movq_A0_reg(R_EBX);
4580 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4581 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4582 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4583 } else
4584 #endif
4585 {
4586 gen_op_movl_A0_reg(R_EBX);
4587 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4588 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4589 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4590 if (s->aflag == 0)
4591 gen_op_andl_A0_ffff();
4592 else
4593 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4594 }
4595 gen_add_A0_ds_seg(s);
4596 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4597 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4598 break;
4599 case 0xb0 ... 0xb7: /* mov R, Ib */
4600 val = insn_get(s, OT_BYTE);
4601 gen_op_movl_T0_im(val);
4602 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4603 break;
4604 case 0xb8 ... 0xbf: /* mov R, Iv */
4605 #ifdef TARGET_X86_64
4606 if (dflag == 2) {
4607 uint64_t tmp;
4608 /* 64 bit case */
4609 tmp = ldq_code(s->pc);
4610 s->pc += 8;
4611 reg = (b & 7) | REX_B(s);
4612 gen_movtl_T0_im(tmp);
4613 gen_op_mov_reg_T0(OT_QUAD, reg);
4614 } else
4615 #endif
4616 {
4617 ot = dflag ? OT_LONG : OT_WORD;
4618 val = insn_get(s, ot);
4619 reg = (b & 7) | REX_B(s);
4620 gen_op_movl_T0_im(val);
4621 gen_op_mov_reg_T0(ot, reg);
4622 }
4623 break;
4624
4625 case 0x91 ... 0x97: /* xchg R, EAX */
4626 ot = dflag + OT_WORD;
4627 reg = (b & 7) | REX_B(s);
4628 rm = R_EAX;
4629 goto do_xchg_reg;
4630 case 0x86:
4631 case 0x87: /* xchg Ev, Gv */
4632 if ((b & 1) == 0)
4633 ot = OT_BYTE;
4634 else
4635 ot = dflag + OT_WORD;
4636 modrm = ldub_code(s->pc++);
4637 reg = ((modrm >> 3) & 7) | rex_r;
4638 mod = (modrm >> 6) & 3;
4639 if (mod == 3) {
4640 rm = (modrm & 7) | REX_B(s);
4641 do_xchg_reg:
4642 gen_op_mov_TN_reg(ot, 0, reg);
4643 gen_op_mov_TN_reg(ot, 1, rm);
4644 gen_op_mov_reg_T0(ot, rm);
4645 gen_op_mov_reg_T1(ot, reg);
4646 } else {
4647 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4648 gen_op_mov_TN_reg(ot, 0, reg);
4649 /* for xchg, lock is implicit */
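/* an explicit LOCK prefix already brackets the insn with lock/unlock
   in the prefix handling, so only lock here when the prefix was
   absent. */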
4650 if (!(prefixes & PREFIX_LOCK))
4651 tcg_gen_helper_0_0(helper_lock);
4652 gen_op_ld_T1_A0(ot + s->mem_index);
4653 gen_op_st_T0_A0(ot + s->mem_index);
4654 if (!(prefixes & PREFIX_LOCK))
4655 tcg_gen_helper_0_0(helper_unlock);
4656 gen_op_mov_reg_T1(ot, reg);
4657 }
4658 break;
4659 case 0xc4: /* les Gv */
4660 if (CODE64(s))
4661 goto illegal_op;
4662 op = R_ES;
4663 goto do_lxx;
4664 case 0xc5: /* lds Gv */
4665 if (CODE64(s))
4666 goto illegal_op;
4667 op = R_DS;
4668 goto do_lxx;
4669 case 0x1b2: /* lss Gv */
4670 op = R_SS;
4671 goto do_lxx;
4672 case 0x1b4: /* lfs Gv */
4673 op = R_FS;
4674 goto do_lxx;
4675 case 0x1b5: /* lgs Gv */
4676 op = R_GS;
4677 do_lxx:
4678 ot = dflag ? OT_LONG : OT_WORD;
4679 modrm = ldub_code(s->pc++);
4680 reg = ((modrm >> 3) & 7) | rex_r;
4681 mod = (modrm >> 6) & 3;
4682 if (mod == 3)
4683 goto illegal_op;
4684 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4685 gen_op_ld_T1_A0(ot + s->mem_index);
4686 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4687 /* load the segment first to handle exceptions properly */
4688 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4689 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4690 /* then put the data */
4691 gen_op_mov_reg_T1(ot, reg);
4692 if (s->is_jmp) {
4693 gen_jmp_im(s->pc - s->cs_base);
4694 gen_eob(s);
4695 }
4696 break;
4697
4698 /************************/
4699 /* shifts */
4700 case 0xc0:
4701 case 0xc1:
4702 /* shift Ev,Ib */
4703 shift = 2;
4704 grp2:
4705 {
4706 if ((b & 1) == 0)
4707 ot = OT_BYTE;
4708 else
4709 ot = dflag + OT_WORD;
4710
4711 modrm = ldub_code(s->pc++);
4712 mod = (modrm >> 6) & 3;
4713 op = (modrm >> 3) & 7;
4714
4715 if (mod != 3) {
4716 if (shift == 2) {
4717 s->rip_offset = 1;
4718 }
4719 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4720 opreg = OR_TMP0;
4721 } else {
4722 opreg = (modrm & 7) | REX_B(s);
4723 }
4724
4725 /* count in CL if shift == 0, otherwise an immediate (read below for the Ib forms) */
4726 if (shift == 0) {
4727 gen_shift(s, op, ot, opreg, OR_ECX);
4728 } else {
4729 if (shift == 2) {
4730 shift = ldub_code(s->pc++);
4731 }
4732 gen_shifti(s, op, ot, opreg, shift);
4733 }
4734 }
4735 break;
4736 case 0xd0:
4737 case 0xd1:
4738 /* shift Ev,1 */
4739 shift = 1;
4740 goto grp2;
4741 case 0xd2:
4742 case 0xd3:
4743 /* shift Ev,cl */
4744 shift = 0;
4745 goto grp2;
4746
4747 case 0x1a4: /* shld imm */
4748 op = 0;
4749 shift = 1;
4750 goto do_shiftd;
4751 case 0x1a5: /* shld cl */
4752 op = 0;
4753 shift = 0;
4754 goto do_shiftd;
4755 case 0x1ac: /* shrd imm */
4756 op = 1;
4757 shift = 1;
4758 goto do_shiftd;
4759 case 0x1ad: /* shrd cl */
4760 op = 1;
4761 shift = 0;
4762 do_shiftd:
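/* double-precision shift: T1 holds the second operand (reg) and T3 the
   count (immediate or CL); op selects shld (0) vs. shrd (1). */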
4763 ot = dflag + OT_WORD;
4764 modrm = ldub_code(s->pc++);
4765 mod = (modrm >> 6) & 3;
4766 rm = (modrm & 7) | REX_B(s);
4767 reg = ((modrm >> 3) & 7) | rex_r;
4768 if (mod != 3) {
4769 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4770 opreg = OR_TMP0;
4771 } else {
4772 opreg = rm;
4773 }
4774 gen_op_mov_TN_reg(ot, 1, reg);
4775
4776 if (shift) {
4777 val = ldub_code(s->pc++);
4778 tcg_gen_movi_tl(cpu_T3, val);
4779 } else {
4780 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4781 }
4782 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4783 break;
4784
4785 /************************/
4786 /* floats */
4787 case 0xd8 ... 0xdf:
4788 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4789 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4790 /* XXX: what to do if illegal op? */
4791 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4792 break;
4793 }
4794 modrm = ldub_code(s->pc++);
4795 mod = (modrm >> 6) & 3;
4796 rm = modrm & 7;
4797 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
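/* the 6-bit op merges the low 3 opcode bits (0xd8-0xdf) with the ModRM
   reg field; for memory forms, op >> 4 selects the operand format:
   float32, int32, float64 or int16. */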
4798 if (mod != 3) {
4799 /* memory op */
4800 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4801 switch(op) {
4802 case 0x00 ... 0x07: /* fxxxs */
4803 case 0x10 ... 0x17: /* fixxxl */
4804 case 0x20 ... 0x27: /* fxxxl */
4805 case 0x30 ... 0x37: /* fixxx */
4806 {
4807 int op1;
4808 op1 = op & 7;
4809
4810 switch(op >> 4) {
4811 case 0:
4812 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4813 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4814 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4815 break;
4816 case 1:
4817 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4818 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4819 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4820 break;
4821 case 2:
4822 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4823 (s->mem_index >> 2) - 1);
4824 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4825 break;
4826 case 3:
4827 default:
4828 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4829 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4830 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4831 break;
4832 }
4833
4834 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4835 if (op1 == 3) {
4836 /* fcomp needs pop */
4837 tcg_gen_helper_0_0(helper_fpop);
4838 }
4839 }
4840 break;
4841 case 0x08: /* flds */
4842 case 0x0a: /* fsts */
4843 case 0x0b: /* fstps */
4844 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4845 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4846 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4847 switch(op & 7) {
4848 case 0:
4849 switch(op >> 4) {
4850 case 0:
4851 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4852 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4853 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4854 break;
4855 case 1:
4856 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4857 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4858 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4859 break;
4860 case 2:
4861 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4862 (s->mem_index >> 2) - 1);
4863 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4864 break;
4865 case 3:
4866 default:
4867 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4868 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4869 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4870 break;
4871 }
4872 break;
4873 case 1:
4874 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) must be tested! */
4875 switch(op >> 4) {
4876 case 1:
4877 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4878 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4879 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4880 break;
4881 case 2:
4882 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4883 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4884 (s->mem_index >> 2) - 1);
4885 break;
4886 case 3:
4887 default:
4888 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4889 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4890 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4891 break;
4892 }
4893 tcg_gen_helper_0_0(helper_fpop);
4894 break;
4895 default:
4896 switch(op >> 4) {
4897 case 0:
4898 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4899 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4900 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4901 break;
4902 case 1:
4903 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4904 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4905 gen_op_st_T0_A0(OT_LONG + s->mem_index);
4906 break;
4907 case 2:
4908 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4909 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4910 (s->mem_index >> 2) - 1);
4911 break;
4912 case 3:
4913 default:
4914 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4915 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4916 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4917 break;
4918 }
4919 if ((op & 7) == 3)
4920 tcg_gen_helper_0_0(helper_fpop);
4921 break;
4922 }
4923 break;
4924 case 0x0c: /* fldenv mem */
4925 if (s->cc_op != CC_OP_DYNAMIC)
4926 gen_op_set_cc_op(s->cc_op);
4927 gen_jmp_im(pc_start - s->cs_base);
4928 tcg_gen_helper_0_2(helper_fldenv,
4929 cpu_A0, tcg_const_i32(s->dflag));
4930 break;
4931 case 0x0d: /* fldcw mem */
4932 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4933 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4934 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4935 break;
4936 case 0x0e: /* fnstenv mem */
4937 if (s->cc_op != CC_OP_DYNAMIC)
4938 gen_op_set_cc_op(s->cc_op);
4939 gen_jmp_im(pc_start - s->cs_base);
4940 tcg_gen_helper_0_2(helper_fstenv,
4941 cpu_A0, tcg_const_i32(s->dflag));
4942 break;
4943 case 0x0f: /* fnstcw mem */
4944 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4945 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4946 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4947 break;
4948 case 0x1d: /* fldt mem */
4949 if (s->cc_op != CC_OP_DYNAMIC)
4950 gen_op_set_cc_op(s->cc_op);
4951 gen_jmp_im(pc_start - s->cs_base);
4952 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4953 break;
4954 case 0x1f: /* fstpt mem */
4955 if (s->cc_op != CC_OP_DYNAMIC)
4956 gen_op_set_cc_op(s->cc_op);
4957 gen_jmp_im(pc_start - s->cs_base);
4958 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4959 tcg_gen_helper_0_0(helper_fpop);
4960 break;
4961 case 0x2c: /* frstor mem */
4962 if (s->cc_op != CC_OP_DYNAMIC)
4963 gen_op_set_cc_op(s->cc_op);
4964 gen_jmp_im(pc_start - s->cs_base);
4965 tcg_gen_helper_0_2(helper_frstor,
4966 cpu_A0, tcg_const_i32(s->dflag));
4967 break;
4968 case 0x2e: /* fnsave mem */
4969 if (s->cc_op != CC_OP_DYNAMIC)
4970 gen_op_set_cc_op(s->cc_op);
4971 gen_jmp_im(pc_start - s->cs_base);
4972 tcg_gen_helper_0_2(helper_fsave,
4973 cpu_A0, tcg_const_i32(s->dflag));
4974 break;
4975 case 0x2f: /* fnstsw mem */
4976 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
4977 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4978 gen_op_st_T0_A0(OT_WORD + s->mem_index);
4979 break;
4980 case 0x3c: /* fbld */
4981 if (s->cc_op != CC_OP_DYNAMIC)
4982 gen_op_set_cc_op(s->cc_op);
4983 gen_jmp_im(pc_start - s->cs_base);
4984 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4985 break;
4986 case 0x3e: /* fbstp */
4987 if (s->cc_op != CC_OP_DYNAMIC)
4988 gen_op_set_cc_op(s->cc_op);
4989 gen_jmp_im(pc_start - s->cs_base);
4990 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4991 tcg_gen_helper_0_0(helper_fpop);
4992 break;
4993 case 0x3d: /* fildll */
4994 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4995 (s->mem_index >> 2) - 1);
4996 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
4997 break;
4998 case 0x3f: /* fistpll */
4999 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5000 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5001 (s->mem_index >> 2) - 1);
5002 tcg_gen_helper_0_0(helper_fpop);
5003 break;
5004 default:
5005 goto illegal_op;
5006 }
5007 } else {
5008 /* register float ops */
5009 opreg = rm;
5010
5011 switch(op) {
5012 case 0x08: /* fld sti */
5013 tcg_gen_helper_0_0(helper_fpush);
5014 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5015 break;
5016 case 0x09: /* fxchg sti */
5017 case 0x29: /* fxchg4 sti, undocumented op */
5018 case 0x39: /* fxchg7 sti, undocumented op */
5019 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5020 break;
5021 case 0x0a: /* grp d9/2 */
5022 switch(rm) {
5023 case 0: /* fnop */
5024 /* check exceptions (FreeBSD FPU probe) */
5025 if (s->cc_op != CC_OP_DYNAMIC)
5026 gen_op_set_cc_op(s->cc_op);
5027 gen_jmp_im(pc_start - s->cs_base);
5028 tcg_gen_helper_0_0(helper_fwait);
5029 break;
5030 default:
5031 goto illegal_op;
5032 }
5033 break;
5034 case 0x0c: /* grp d9/4 */
5035 switch(rm) {
5036 case 0: /* fchs */
5037 tcg_gen_helper_0_0(helper_fchs_ST0);
5038 break;
5039 case 1: /* fabs */
5040 tcg_gen_helper_0_0(helper_fabs_ST0);
5041 break;
5042 case 4: /* ftst */
5043 tcg_gen_helper_0_0(helper_fldz_FT0);
5044 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5045 break;
5046 case 5: /* fxam */
5047 tcg_gen_helper_0_0(helper_fxam_ST0);
5048 break;
5049 default:
5050 goto illegal_op;
5051 }
5052 break;
5053 case 0x0d: /* grp d9/5 */
5054 {
5055 switch(rm) {
5056 case 0:
5057 tcg_gen_helper_0_0(helper_fpush);
5058 tcg_gen_helper_0_0(helper_fld1_ST0);
5059 break;
5060 case 1:
5061 tcg_gen_helper_0_0(helper_fpush);
5062 tcg_gen_helper_0_0(helper_fldl2t_ST0);
5063 break;
5064 case 2:
5065 tcg_gen_helper_0_0(helper_fpush);
5066 tcg_gen_helper_0_0(helper_fldl2e_ST0);
5067 break;
5068 case 3:
5069 tcg_gen_helper_0_0(helper_fpush);
5070 tcg_gen_helper_0_0(helper_fldpi_ST0);
5071 break;
5072 case 4:
5073 tcg_gen_helper_0_0(helper_fpush);
5074 tcg_gen_helper_0_0(helper_fldlg2_ST0);
5075 break;
5076 case 5:
5077 tcg_gen_helper_0_0(helper_fpush);
5078 tcg_gen_helper_0_0(helper_fldln2_ST0);
5079 break;
5080 case 6:
5081 tcg_gen_helper_0_0(helper_fpush);
5082 tcg_gen_helper_0_0(helper_fldz_ST0);
5083 break;
5084 default:
5085 goto illegal_op;
5086 }
5087 }
5088 break;
5089 case 0x0e: /* grp d9/6 */
5090 switch(rm) {
5091 case 0: /* f2xm1 */
5092 tcg_gen_helper_0_0(helper_f2xm1);
5093 break;
5094 case 1: /* fyl2x */
5095 tcg_gen_helper_0_0(helper_fyl2x);
5096 break;
5097 case 2: /* fptan */
5098 tcg_gen_helper_0_0(helper_fptan);
5099 break;
5100 case 3: /* fpatan */
5101 tcg_gen_helper_0_0(helper_fpatan);
5102 break;
5103 case 4: /* fxtract */
5104 tcg_gen_helper_0_0(helper_fxtract);
5105 break;
5106 case 5: /* fprem1 */
5107 tcg_gen_helper_0_0(helper_fprem1);
5108 break;
5109 case 6: /* fdecstp */
5110 tcg_gen_helper_0_0(helper_fdecstp);
5111 break;
5112 default:
5113 case 7: /* fincstp */
5114 tcg_gen_helper_0_0(helper_fincstp);
5115 break;
5116 }
5117 break;
5118 case 0x0f: /* grp d9/7 */
5119 switch(rm) {
5120 case 0: /* fprem */
5121 tcg_gen_helper_0_0(helper_fprem);
5122 break;
5123 case 1: /* fyl2xp1 */
5124 tcg_gen_helper_0_0(helper_fyl2xp1);
5125 break;
5126 case 2: /* fsqrt */
5127 tcg_gen_helper_0_0(helper_fsqrt);
5128 break;
5129 case 3: /* fsincos */
5130 tcg_gen_helper_0_0(helper_fsincos);
5131 break;
5132 case 5: /* fscale */
5133 tcg_gen_helper_0_0(helper_fscale);
5134 break;
5135 case 4: /* frndint */
5136 tcg_gen_helper_0_0(helper_frndint);
5137 break;
5138 case 6: /* fsin */
5139 tcg_gen_helper_0_0(helper_fsin);
5140 break;
5141 default:
5142 case 7: /* fcos */
5143 tcg_gen_helper_0_0(helper_fcos);
5144 break;
5145 }
5146 break;
5147 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5148 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5149 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5150 {
5151 int op1;
5152
5153 op1 = op & 7;
5154 if (op >= 0x20) {
5155 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5156 if (op >= 0x30)
5157 tcg_gen_helper_0_0(helper_fpop);
5158 } else {
5159 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5160 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5161 }
5162 }
5163 break;
5164 case 0x02: /* fcom */
5165 case 0x22: /* fcom2, undocumented op */
5166 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5167 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5168 break;
5169 case 0x03: /* fcomp */
5170 case 0x23: /* fcomp3, undocumented op */
5171 case 0x32: /* fcomp5, undocumented op */
5172 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5173 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5174 tcg_gen_helper_0_0(helper_fpop);
5175 break;
5176 case 0x15: /* da/5 */
5177 switch(rm) {
5178 case 1: /* fucompp */
5179 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5180 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5181 tcg_gen_helper_0_0(helper_fpop);
5182 tcg_gen_helper_0_0(helper_fpop);
5183 break;
5184 default:
5185 goto illegal_op;
5186 }
5187 break;
5188 case 0x1c:
5189 switch(rm) {
5190 case 0: /* feni (287 only, just do nop here) */
5191 break;
5192 case 1: /* fdisi (287 only, just do nop here) */
5193 break;
5194 case 2: /* fclex */
5195 tcg_gen_helper_0_0(helper_fclex);
5196 break;
5197 case 3: /* fninit */
5198 tcg_gen_helper_0_0(helper_fninit);
5199 break;
5200 case 4: /* fsetpm (287 only, just do nop here) */
5201 break;
5202 default:
5203 goto illegal_op;
5204 }
5205 break;
5206 case 0x1d: /* fucomi */
5207 if (s->cc_op != CC_OP_DYNAMIC)
5208 gen_op_set_cc_op(s->cc_op);
5209 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5210 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5211 gen_op_fcomi_dummy();
5212 s->cc_op = CC_OP_EFLAGS;
5213 break;
5214 case 0x1e: /* fcomi */
5215 if (s->cc_op != CC_OP_DYNAMIC)
5216 gen_op_set_cc_op(s->cc_op);
5217 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5218 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5219 gen_op_fcomi_dummy();
5220 s->cc_op = CC_OP_EFLAGS;
5221 break;
5222 case 0x28: /* ffree sti */
5223 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5224 break;
5225 case 0x2a: /* fst sti */
5226 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5227 break;
5228 case 0x2b: /* fstp sti */
5229 case 0x0b: /* fstp1 sti, undocumented op */
5230 case 0x3a: /* fstp8 sti, undocumented op */
5231 case 0x3b: /* fstp9 sti, undocumented op */
5232 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5233 tcg_gen_helper_0_0(helper_fpop);
5234 break;
5235 case 0x2c: /* fucom st(i) */
5236 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5237 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5238 break;
5239 case 0x2d: /* fucomp st(i) */
5240 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5241 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5242 tcg_gen_helper_0_0(helper_fpop);
5243 break;
5244 case 0x33: /* de/3 */
5245 switch(rm) {
5246 case 1: /* fcompp */
5247 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5248 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5249 tcg_gen_helper_0_0(helper_fpop);
5250 tcg_gen_helper_0_0(helper_fpop);
5251 break;
5252 default:
5253 goto illegal_op;
5254 }
5255 break;
5256 case 0x38: /* ffreep sti, undocumented op */
5257 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5258 tcg_gen_helper_0_0(helper_fpop);
5259 break;
5260 case 0x3c: /* df/4 */
5261 switch(rm) {
5262 case 0:
5263 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5264 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5265 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5266 break;
5267 default:
5268 goto illegal_op;
5269 }
5270 break;
5271 case 0x3d: /* fucomip */
5272 if (s->cc_op != CC_OP_DYNAMIC)
5273 gen_op_set_cc_op(s->cc_op);
5274 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5275 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5276 tcg_gen_helper_0_0(helper_fpop);
5277 gen_op_fcomi_dummy();
5278 s->cc_op = CC_OP_EFLAGS;
5279 break;
5280 case 0x3e: /* fcomip */
5281 if (s->cc_op != CC_OP_DYNAMIC)
5282 gen_op_set_cc_op(s->cc_op);
5283 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5284 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5285 tcg_gen_helper_0_0(helper_fpop);
5286 gen_op_fcomi_dummy();
5287 s->cc_op = CC_OP_EFLAGS;
5288 break;
5289 case 0x10 ... 0x13: /* fcmovxx */
5290 case 0x18 ... 0x1b:
5291 {
5292 int op1, l1;
5293 static const uint8_t fcmov_cc[8] = {
5294 (JCC_B << 1),
5295 (JCC_Z << 1),
5296 (JCC_BE << 1),
5297 (JCC_P << 1),
5298 };
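/* bits 1-0 of the opcode select the base condition; bit 3 (via
   op >> 3) sets the low bit of the jcc code, which negates it
   (fcmovnb, fcmovnz, ...). */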
5299 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5300 gen_setcc(s, op1);
5301 l1 = gen_new_label();
5302 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5303 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5304 gen_set_label(l1);
5305 }
5306 break;
5307 default:
5308 goto illegal_op;
5309 }
5310 }
5311 break;
5312 /************************/
5313 /* string ops */
5314
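/* with a REP prefix the gen_repz_* helpers emit the whole count loop
   (for scas/cmps also the ZF test selected by REPZ vs. REPNZ);
   otherwise a single iteration is generated inline. */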
5315 case 0xa4: /* movsS */
5316 case 0xa5:
5317 if ((b & 1) == 0)
5318 ot = OT_BYTE;
5319 else
5320 ot = dflag + OT_WORD;
5321
5322 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5323 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5324 } else {
5325 gen_movs(s, ot);
5326 }
5327 break;
5328
5329 case 0xaa: /* stosS */
5330 case 0xab:
5331 if ((b & 1) == 0)
5332 ot = OT_BYTE;
5333 else
5334 ot = dflag + OT_WORD;
5335
5336 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5337 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5338 } else {
5339 gen_stos(s, ot);
5340 }
5341 break;
5342 case 0xac: /* lodsS */
5343 case 0xad:
5344 if ((b & 1) == 0)
5345 ot = OT_BYTE;
5346 else
5347 ot = dflag + OT_WORD;
5348 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5349 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5350 } else {
5351 gen_lods(s, ot);
5352 }
5353 break;
5354 case 0xae: /* scasS */
5355 case 0xaf:
5356 if ((b & 1) == 0)
5357 ot = OT_BYTE;
5358 else
5359 ot = dflag + OT_WORD;
5360 if (prefixes & PREFIX_REPNZ) {
5361 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5362 } else if (prefixes & PREFIX_REPZ) {
5363 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5364 } else {
5365 gen_scas(s, ot);
5366 s->cc_op = CC_OP_SUBB + ot;
5367 }
5368 break;
5369
5370 case 0xa6: /* cmpsS */
5371 case 0xa7:
5372 if ((b & 1) == 0)
5373 ot = OT_BYTE;
5374 else
5375 ot = dflag + OT_WORD;
5376 if (prefixes & PREFIX_REPNZ) {
5377 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5378 } else if (prefixes & PREFIX_REPZ) {
5379 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5380 } else {
5381 gen_cmps(s, ot);
5382 s->cc_op = CC_OP_SUBB + ot;
5383 }
5384 break;
5385 case 0x6c: /* insS */
5386 case 0x6d:
5387 if ((b & 1) == 0)
5388 ot = OT_BYTE;
5389 else
5390 ot = dflag ? OT_LONG : OT_WORD;
5391 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5392 gen_op_andl_T0_ffff();
5393 gen_check_io(s, ot, pc_start - s->cs_base,
5394 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5395 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5396 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5397 } else {
5398 gen_ins(s, ot);
5399 }
5400 break;
5401 case 0x6e: /* outsS */
5402 case 0x6f:
5403 if ((b & 1) == 0)
5404 ot = OT_BYTE;
5405 else
5406 ot = dflag ? OT_LONG : OT_WORD;
5407 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5408 gen_op_andl_T0_ffff();
5409 gen_check_io(s, ot, pc_start - s->cs_base,
5410 svm_is_rep(prefixes) | 4);
5411 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5412 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5413 } else {
5414 gen_outs(s, ot);
5415 }
5416 break;
5417
5418 /************************/
5419 /* port I/O */
5420
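/* gen_check_io validates IOPL and the TSS I/O permission bitmap and
   generates the SVM IOIO intercept; SVM_IOIO_TYPE_MASK flags the
   access as a port read (IN), so the OUT forms omit it. */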
5421 case 0xe4:
5422 case 0xe5:
5423 if ((b & 1) == 0)
5424 ot = OT_BYTE;
5425 else
5426 ot = dflag ? OT_LONG : OT_WORD;
5427 val = ldub_code(s->pc++);
5428 gen_op_movl_T0_im(val);
5429 gen_check_io(s, ot, pc_start - s->cs_base,
5430 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5431 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5432 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5433 gen_op_mov_reg_T1(ot, R_EAX);
5434 break;
5435 case 0xe6:
5436 case 0xe7:
5437 if ((b & 1) == 0)
5438 ot = OT_BYTE;
5439 else
5440 ot = dflag ? OT_LONG : OT_WORD;
5441 val = ldub_code(s->pc++);
5442 gen_op_movl_T0_im(val);
5443 gen_check_io(s, ot, pc_start - s->cs_base,
5444 svm_is_rep(prefixes));
5445 gen_op_mov_TN_reg(ot, 1, R_EAX);
5446
5447 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5448 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5449 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5450 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5451 break;
5452 case 0xec:
5453 case 0xed:
5454 if ((b & 1) == 0)
5455 ot = OT_BYTE;
5456 else
5457 ot = dflag ? OT_LONG : OT_WORD;
5458 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5459 gen_op_andl_T0_ffff();
5460 gen_check_io(s, ot, pc_start - s->cs_base,
5461 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5462 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5463 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5464 gen_op_mov_reg_T1(ot, R_EAX);
5465 break;
5466 case 0xee:
5467 case 0xef:
5468 if ((b & 1) == 0)
5469 ot = OT_BYTE;
5470 else
5471 ot = dflag ? OT_LONG : OT_WORD;
5472 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5473 gen_op_andl_T0_ffff();
5474 gen_check_io(s, ot, pc_start - s->cs_base,
5475 svm_is_rep(prefixes));
5476 gen_op_mov_TN_reg(ot, 1, R_EAX);
5477
5478 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5479 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5480 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5481 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5482 break;
5483
5484 /************************/
5485 /* control */
5486 case 0xc2: /* ret im */
5487 val = ldsw_code(s->pc);
5488 s->pc += 2;
5489 gen_pop_T0(s);
5490 if (CODE64(s) && s->dflag)
5491 s->dflag = 2;
5492 gen_stack_update(s, val + (2 << s->dflag));
5493 if (s->dflag == 0)
5494 gen_op_andl_T0_ffff();
5495 gen_op_jmp_T0();
5496 gen_eob(s);
5497 break;
5498 case 0xc3: /* ret */
5499 gen_pop_T0(s);
5500 gen_pop_update(s);
5501 if (s->dflag == 0)
5502 gen_op_andl_T0_ffff();
5503 gen_op_jmp_T0();
5504 gen_eob(s);
5505 break;
5506 case 0xca: /* lret im */
5507 val = ldsw_code(s->pc);
5508 s->pc += 2;
5509 do_lret:
5510 if (s->pe && !s->vm86) {
5511 if (s->cc_op != CC_OP_DYNAMIC)
5512 gen_op_set_cc_op(s->cc_op);
5513 gen_jmp_im(pc_start - s->cs_base);
5514 tcg_gen_helper_0_2(helper_lret_protected,
5515 tcg_const_i32(s->dflag),
5516 tcg_const_i32(val));
5517 } else {
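/* real/vm86 mode: pop EIP and CS by hand from the stack image and
   then drop 'val' extra bytes, all scaled by the operand size. */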
5518 gen_stack_A0(s);
5519 /* pop offset */
5520 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5521 if (s->dflag == 0)
5522 gen_op_andl_T0_ffff();
5523 /* NOTE: keeping EIP updated is not a problem in case of
5524 exception */
5525 gen_op_jmp_T0();
5526 /* pop selector */
5527 gen_op_addl_A0_im(2 << s->dflag);
5528 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5529 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5530 /* add stack offset */
5531 gen_stack_update(s, val + (4 << s->dflag));
5532 }
5533 gen_eob(s);
5534 break;
5535 case 0xcb: /* lret */
5536 val = 0;
5537 goto do_lret;
5538 case 0xcf: /* iret */
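/* iret: real mode and vm86 with IOPL 3 use the simple real-mode
   helper; vm86 with lower IOPL faults; protected mode takes the full
   privilege-checking helper. */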
5539 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5540 break;
5541 if (!s->pe) {
5542 /* real mode */
5543 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5544 s->cc_op = CC_OP_EFLAGS;
5545 } else if (s->vm86) {
5546 if (s->iopl != 3) {
5547 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5548 } else {
5549 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5550 s->cc_op = CC_OP_EFLAGS;
5551 }
5552 } else {
5553 if (s->cc_op != CC_OP_DYNAMIC)
5554 gen_op_set_cc_op(s->cc_op);
5555 gen_jmp_im(pc_start - s->cs_base);
5556 tcg_gen_helper_0_2(helper_iret_protected,
5557 tcg_const_i32(s->dflag),
5558 tcg_const_i32(s->pc - s->cs_base));
5559 s->cc_op = CC_OP_EFLAGS;
5560 }
5561 gen_eob(s);
5562 break;
5563 case 0xe8: /* call im */
5564 {
5565 if (dflag)
5566 tval = (int32_t)insn_get(s, OT_LONG);
5567 else
5568 tval = (int16_t)insn_get(s, OT_WORD);
5569 next_eip = s->pc - s->cs_base;
5570 tval += next_eip;
5571 if (s->dflag == 0)
5572 tval &= 0xffff;
5573 gen_movtl_T0_im(next_eip);
5574 gen_push_T0(s);
5575 gen_jmp(s, tval);
5576 }
5577 break;
5578 case 0x9a: /* lcall im */
5579 {
5580 unsigned int selector, offset;
5581
5582 if (CODE64(s))
5583 goto illegal_op;
5584 ot = dflag ? OT_LONG : OT_WORD;
5585 offset = insn_get(s, ot);
5586 selector = insn_get(s, OT_WORD);
5587
5588 gen_op_movl_T0_im(selector);
5589 gen_op_movl_T1_imu(offset);
5590 }
5591 goto do_lcall;
5592 case 0xe9: /* jmp im */
5593 if (dflag)
5594 tval = (int32_t)insn_get(s, OT_LONG);
5595 else
5596 tval = (int16_t)insn_get(s, OT_WORD);
5597 tval += s->pc - s->cs_base;
5598 if (s->dflag == 0)
5599 tval &= 0xffff;
5600 gen_jmp(s, tval);
5601 break;
5602 case 0xea: /* ljmp im */
5603 {
5604 unsigned int selector, offset;
5605
5606 if (CODE64(s))
5607 goto illegal_op;
5608 ot = dflag ? OT_LONG : OT_WORD;
5609 offset = insn_get(s, ot);
5610 selector = insn_get(s, OT_WORD);
5611
5612 gen_op_movl_T0_im(selector);
5613 gen_op_movl_T1_imu(offset);
5614 }
5615 goto do_ljmp;
5616 case 0xeb: /* jmp Jb */
5617 tval = (int8_t)insn_get(s, OT_BYTE);
5618 tval += s->pc - s->cs_base;
5619 if (s->dflag == 0)
5620 tval &= 0xffff;
5621 gen_jmp(s, tval);
5622 break;
5623 case 0x70 ... 0x7f: /* jcc Jb */
5624 tval = (int8_t)insn_get(s, OT_BYTE);
5625 goto do_jcc;
5626 case 0x180 ... 0x18f: /* jcc Jv */
5627 if (dflag) {
5628 tval = (int32_t)insn_get(s, OT_LONG);
5629 } else {
5630 tval = (int16_t)insn_get(s, OT_WORD);
5631 }
5632 do_jcc:
5633 next_eip = s->pc - s->cs_base;
5634 tval += next_eip;
5635 if (s->dflag == 0)
5636 tval &= 0xffff;
5637 gen_jcc(s, b, tval, next_eip);
5638 break;
5639
5640 case 0x190 ... 0x19f: /* setcc Gv */
5641 modrm = ldub_code(s->pc++);
5642 gen_setcc(s, b);
5643 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5644 break;
5645 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5646 ot = dflag + OT_WORD;
5647 modrm = ldub_code(s->pc++);
5648 reg = ((modrm >> 3) & 7) | rex_r;
5649 mod = (modrm >> 6) & 3;
5650 gen_setcc(s, b);
5651 if (mod != 3) {
5652 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5653 gen_op_ld_T1_A0(ot + s->mem_index);
5654 } else {
5655 rm = (modrm & 7) | REX_B(s);
5656 gen_op_mov_TN_reg(ot, 1, rm);
5657 }
5658 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5659 break;
5660
5661 /************************/
5662 /* flags */
5663 case 0x9c: /* pushf */
5664 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5665 break;
5666 if (s->vm86 && s->iopl != 3) {
5667 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5668 } else {
5669 if (s->cc_op != CC_OP_DYNAMIC)
5670 gen_op_set_cc_op(s->cc_op);
5671 gen_op_movl_T0_eflags();
5672 gen_push_T0(s);
5673 }
5674 break;
5675 case 0x9d: /* popf */
5676 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5677 break;
5678 if (s->vm86 && s->iopl != 3) {
5679 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5680 } else {
5681 gen_pop_T0(s);
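/* which eflags bits popf may modify depends on privilege: CPL 0 may
   also change IOPL, CPL <= IOPL may change IF, otherwise neither; the
   three micro-op variants encode the corresponding masks. */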
5682 if (s->cpl == 0) {
5683 if (s->dflag) {
5684 gen_op_movl_eflags_T0_cpl0();
5685 } else {
5686 gen_op_movw_eflags_T0_cpl0();
5687 }
5688 } else {
5689 if (s->cpl <= s->iopl) {
5690 if (s->dflag) {
5691 gen_op_movl_eflags_T0_io();
5692 } else {
5693 gen_op_movw_eflags_T0_io();
5694 }
5695 } else {
5696 if (s->dflag) {
5697 gen_op_movl_eflags_T0();
5698 } else {
5699 gen_op_movw_eflags_T0();
5700 }
5701 }
5702 }
5703 gen_pop_update(s);
5704 s->cc_op = CC_OP_EFLAGS;
5705 /* abort translation because TF flag may change */
5706 gen_jmp_im(s->pc - s->cs_base);
5707 gen_eob(s);
5708 }
5709 break;
5710 case 0x9e: /* sahf */
5711 if (CODE64(s))
5712 goto illegal_op;
5713 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5714 if (s->cc_op != CC_OP_DYNAMIC)
5715 gen_op_set_cc_op(s->cc_op);
5716 gen_op_movb_eflags_T0();
5717 s->cc_op = CC_OP_EFLAGS;
5718 break;
5719 case 0x9f: /* lahf */
5720 if (CODE64(s))
5721 goto illegal_op;
5722 if (s->cc_op != CC_OP_DYNAMIC)
5723 gen_op_set_cc_op(s->cc_op);
5724 gen_op_movl_T0_eflags();
5725 gen_op_mov_reg_T0(OT_BYTE, R_AH);
5726 break;
5727 case 0xf5: /* cmc */
5728 if (s->cc_op != CC_OP_DYNAMIC)
5729 gen_op_set_cc_op(s->cc_op);
5730 gen_op_cmc();
5731 s->cc_op = CC_OP_EFLAGS;
5732 break;
5733 case 0xf8: /* clc */
5734 if (s->cc_op != CC_OP_DYNAMIC)
5735 gen_op_set_cc_op(s->cc_op);
5736 gen_op_clc();
5737 s->cc_op = CC_OP_EFLAGS;
5738 break;
5739 case 0xf9: /* stc */
5740 if (s->cc_op != CC_OP_DYNAMIC)
5741 gen_op_set_cc_op(s->cc_op);
5742 gen_op_stc();
5743 s->cc_op = CC_OP_EFLAGS;
5744 break;
5745 case 0xfc: /* cld */
5746 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5747 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5748 break;
5749 case 0xfd: /* std */
5750 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5751 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5752 break;
5753
5754 /************************/
5755 /* bit operations */
5756 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5757 ot = dflag + OT_WORD;
5758 modrm = ldub_code(s->pc++);
5759 op = (modrm >> 3) & 7;
5760 mod = (modrm >> 6) & 3;
5761 rm = (modrm & 7) | REX_B(s);
5762 if (mod != 3) {
5763 s->rip_offset = 1;
5764 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5765 gen_op_ld_T0_A0(ot + s->mem_index);
5766 } else {
5767 gen_op_mov_TN_reg(ot, 0, rm);
5768 }
5769 /* load shift */
5770 val = ldub_code(s->pc++);
5771 gen_op_movl_T1_im(val);
5772 if (op < 4)
5773 goto illegal_op;
5774 op -= 4;
5775 goto bt_op;
5776 case 0x1a3: /* bt Gv, Ev */
5777 op = 0;
5778 goto do_btx;
5779 case 0x1ab: /* bts */
5780 op = 1;
5781 goto do_btx;
5782 case 0x1b3: /* btr */
5783 op = 2;
5784 goto do_btx;
5785 case 0x1bb: /* btc */
5786 op = 3;
5787 do_btx:
5788 ot = dflag + OT_WORD;
5789 modrm = ldub_code(s->pc++);
5790 reg = ((modrm >> 3) & 7) | rex_r;
5791 mod = (modrm >> 6) & 3;
5792 rm = (modrm & 7) | REX_B(s);
5793 gen_op_mov_TN_reg(OT_LONG, 1, reg);
5794 if (mod != 3) {
5795 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5796 /* the bit offset may address outside the operand: add its sign-extended, operand-scaled word index to A0 as a byte displacement */
5797 gen_exts(ot, cpu_T[1]);
5798 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5799 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5800 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5801 gen_op_ld_T0_A0(ot + s->mem_index);
5802 } else {
5803 gen_op_mov_TN_reg(ot, 0, rm);
5804 }
5805 bt_op:
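/* the bit offset is first reduced modulo the operand width; for
   bts/btr/btc the addressed bit is shifted down into tmp4 before the
   update so CF can later be computed from cc_src. */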
5806 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5807 switch(op) {
5808 case 0:
5809 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5810 tcg_gen_movi_tl(cpu_cc_dst, 0);
5811 break;
5812 case 1:
5813 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5814 tcg_gen_movi_tl(cpu_tmp0, 1);
5815 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5816 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5817 break;
5818 case 2:
5819 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5820 tcg_gen_movi_tl(cpu_tmp0, 1);
5821 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5822 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5823 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5824 break;
5825 default:
5826 case 3:
5827 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5828 tcg_gen_movi_tl(cpu_tmp0, 1);
5829 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5830 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5831 break;
5832 }
5833 s->cc_op = CC_OP_SARB + ot;
5834 if (op != 0) {
5835 if (mod != 3)
5836 gen_op_st_T0_A0(ot + s->mem_index);
5837 else
5838 gen_op_mov_reg_T0(ot, rm);
5839 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5840 tcg_gen_movi_tl(cpu_cc_dst, 0);
5841 }
5842 break;
5843 case 0x1bc: /* bsf */
5844 case 0x1bd: /* bsr */
5845 {
5846 int label1;
5847 ot = dflag + OT_WORD;
5848 modrm = ldub_code(s->pc++);
5849 reg = ((modrm >> 3) & 7) | rex_r;
5850 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5851 gen_extu(ot, cpu_T[0]);
5852 label1 = gen_new_label();
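/* if the source is zero the destination must stay unchanged and ZF
   must be set: cc_dst stays 0 and the helper is skipped. cc_src is
   discarded since only ZF is defined afterwards. */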
5853 tcg_gen_movi_tl(cpu_cc_dst, 0);
5854 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5855 if (b & 1) {
5856 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5857 } else {
5858 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5859 }
5860 gen_op_mov_reg_T0(ot, reg);
5861 tcg_gen_movi_tl(cpu_cc_dst, 1);
5862 gen_set_label(label1);
5863 tcg_gen_discard_tl(cpu_cc_src);
5864 s->cc_op = CC_OP_LOGICB + ot;
5865 }
5866 break;
5867 /************************/
5868 /* bcd */
5869 case 0x27: /* daa */
5870 if (CODE64(s))
5871 goto illegal_op;
5872 if (s->cc_op != CC_OP_DYNAMIC)
5873 gen_op_set_cc_op(s->cc_op);
5874 gen_op_daa();
5875 s->cc_op = CC_OP_EFLAGS;
5876 break;
5877 case 0x2f: /* das */
5878 if (CODE64(s))
5879 goto illegal_op;
5880 if (s->cc_op != CC_OP_DYNAMIC)
5881 gen_op_set_cc_op(s->cc_op);
5882 gen_op_das();
5883 s->cc_op = CC_OP_EFLAGS;
5884 break;
5885 case 0x37: /* aaa */
5886 if (CODE64(s))
5887 goto illegal_op;
5888 if (s->cc_op != CC_OP_DYNAMIC)
5889 gen_op_set_cc_op(s->cc_op);
5890 gen_op_aaa();
5891 s->cc_op = CC_OP_EFLAGS;
5892 break;
5893 case 0x3f: /* aas */
5894 if (CODE64(s))
5895 goto illegal_op;
5896 if (s->cc_op != CC_OP_DYNAMIC)
5897 gen_op_set_cc_op(s->cc_op);
5898 gen_op_aas();
5899 s->cc_op = CC_OP_EFLAGS;
5900 break;
5901 case 0xd4: /* aam */
5902 if (CODE64(s))
5903 goto illegal_op;
5904 val = ldub_code(s->pc++);
5905 if (val == 0) {
5906 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5907 } else {
5908 gen_op_aam(val);
5909 s->cc_op = CC_OP_LOGICB;
5910 }
5911 break;
5912 case 0xd5: /* aad */
5913 if (CODE64(s))
5914 goto illegal_op;
5915 val = ldub_code(s->pc++);
5916 gen_op_aad(val);
5917 s->cc_op = CC_OP_LOGICB;
5918 break;
5919 /************************/
5920 /* misc */
5921 case 0x90: /* nop */
5922 /* XXX: xchg + rex handling */
5923 /* XXX: correct lock test for all insns */
5924 if (prefixes & PREFIX_LOCK)
5925 goto illegal_op;
5926 if (prefixes & PREFIX_REPZ) {
5927 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5928 }
5929 break;
5930 case 0x9b: /* fwait */
5931 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5932 (HF_MP_MASK | HF_TS_MASK)) {
5933 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5934 } else {
5935 if (s->cc_op != CC_OP_DYNAMIC)
5936 gen_op_set_cc_op(s->cc_op);
5937 gen_jmp_im(pc_start - s->cs_base);
5938 tcg_gen_helper_0_0(helper_fwait);
5939 }
5940 break;
5941 case 0xcc: /* int3 */
5942 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5943 break;
5944 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5945 break;
5946 case 0xcd: /* int N */
5947 val = ldub_code(s->pc++);
5948 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5949 break;
5950 if (s->vm86 && s->iopl != 3) {
5951 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5952 } else {
5953 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5954 }
5955 break;
5956 case 0xce: /* into */
5957 if (CODE64(s))
5958 goto illegal_op;
5959 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5960 break;
5961 if (s->cc_op != CC_OP_DYNAMIC)
5962 gen_op_set_cc_op(s->cc_op);
5963 gen_jmp_im(pc_start - s->cs_base);
5964 gen_op_into(s->pc - pc_start);
5965 break;
5966 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5967 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5968 break;
5969 #if 1
5970 gen_debug(s, pc_start - s->cs_base);
5971 #else
5972 /* start debug */
5973 tb_flush(cpu_single_env);
5974 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5975 #endif
5976 break;
5977 case 0xfa: /* cli */
5978 if (!s->vm86) {
5979 if (s->cpl <= s->iopl) {
5980 tcg_gen_helper_0_0(helper_cli);
5981 } else {
5982 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5983 }
5984 } else {
5985 if (s->iopl == 3) {
5986 tcg_gen_helper_0_0(helper_cli);
5987 } else {
5988 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5989 }
5990 }
5991 break;
5992 case 0xfb: /* sti */
5993 if (!s->vm86) {
5994 if (s->cpl <= s->iopl) {
5995 gen_sti:
5996 tcg_gen_helper_0_0(helper_sti);
5997 /* interrupts are enabled only after the first insn following sti */
5998 /* if several consecutive instructions disable interrupts,
5999 only the _first_ one takes effect */
6000 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6001 tcg_gen_helper_0_0(helper_set_inhibit_irq);
6002 /* give a chance to handle pending irqs */
6003 gen_jmp_im(s->pc - s->cs_base);
6004 gen_eob(s);
6005 } else {
6006 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6007 }
6008 } else {
6009 if (s->iopl == 3) {
6010 goto gen_sti;
6011 } else {
6012 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6013 }
6014 }
6015 break;
6016 case 0x62: /* bound */
6017 if (CODE64(s))
6018 goto illegal_op;
6019 ot = dflag ? OT_LONG : OT_WORD;
6020 modrm = ldub_code(s->pc++);
6021 reg = (modrm >> 3) & 7;
6022 mod = (modrm >> 6) & 3;
6023 if (mod == 3)
6024 goto illegal_op;
6025 gen_op_mov_TN_reg(ot, 0, reg);
6026 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6027 gen_jmp_im(pc_start - s->cs_base);
6028 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6029 if (ot == OT_WORD)
6030 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6031 else
6032 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6033 break;
6034 case 0x1c8 ... 0x1cf: /* bswap reg */
6035 reg = (b & 7) | REX_B(s);
6036 #ifdef TARGET_X86_64
6037 if (dflag == 2) {
6038 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6039 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6040 gen_op_mov_reg_T0(OT_QUAD, reg);
6041 } else
6042 {
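/* 32-bit bswap on a 64-bit target: the value must round-trip through
   an i32 temporary, since bswapping the full i64 would swap all eight
   bytes. */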
6043 TCGv tmp0;
6044 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6045
6046 tmp0 = tcg_temp_new(TCG_TYPE_I32);
6047 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6048 tcg_gen_bswap_i32(tmp0, tmp0);
6049 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6050 gen_op_mov_reg_T0(OT_LONG, reg);
6051 }
6052 #else
6053 {
6054 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6055 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6056 gen_op_mov_reg_T0(OT_LONG, reg);
6057 }
6058 #endif
6059 break;
6060 case 0xd6: /* salc */
6061 if (CODE64(s))
6062 goto illegal_op;
6063 if (s->cc_op != CC_OP_DYNAMIC)
6064 gen_op_set_cc_op(s->cc_op);
6065 gen_op_salc();
6066 break;
6067 case 0xe0: /* loopnz */
6068 case 0xe1: /* loopz */
6069 if (s->cc_op != CC_OP_DYNAMIC)
6070 gen_op_set_cc_op(s->cc_op);
6071 /* FALL THRU */
6072 case 0xe2: /* loop */
6073 case 0xe3: /* jecxz */
6074 {
6075 int l1, l2;
6076
6077 tval = (int8_t)insn_get(s, OT_BYTE);
6078 next_eip = s->pc - s->cs_base;
6079 tval += next_eip;
6080 if (s->dflag == 0)
6081 tval &= 0xffff;
6082
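/* two labels: l1 is the taken branch target, l2 the fall-through.
   jecxz (b == 3) only tests (E)CX; loop/loopz/loopnz decrement it
   first and may also test ZF. */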
6083 l1 = gen_new_label();
6084 l2 = gen_new_label();
6085 b &= 3;
6086 if (b == 3) {
6087 gen_op_jz_ecx[s->aflag](l1);
6088 } else {
6089 gen_op_dec_ECX[s->aflag]();
6090 if (b <= 1)
6091 gen_op_mov_T0_cc();
6092 gen_op_loop[s->aflag][b](l1);
6093 }
6094
6095 gen_jmp_im(next_eip);
6096 gen_op_jmp_label(l2);
6097 gen_set_label(l1);
6098 gen_jmp_im(tval);
6099 gen_set_label(l2);
6100 gen_eob(s);
6101 }
6102 break;
6103 case 0x130: /* wrmsr */
6104 case 0x132: /* rdmsr */
6105 if (s->cpl != 0) {
6106 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6107 } else {
6108 int retval = 0;
6109 if (b & 2) {
6110 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6111 tcg_gen_helper_0_0(helper_rdmsr);
6112 } else {
6113 retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6114 tcg_gen_helper_0_0(helper_wrmsr);
6115 }
6116 if(retval)
6117 gen_eob(s);
6118 }
6119 break;
6120 case 0x131: /* rdtsc */
6121 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6122 break;
6123 gen_jmp_im(pc_start - s->cs_base);
6124 tcg_gen_helper_0_0(helper_rdtsc);
6125 break;
6126 case 0x133: /* rdpmc */
6127 gen_jmp_im(pc_start - s->cs_base);
6128 tcg_gen_helper_0_0(helper_rdpmc);
6129 break;
6130 case 0x134: /* sysenter */
6131 if (CODE64(s))
6132 goto illegal_op;
6133 if (!s->pe) {
6134 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6135 } else {
6136 if (s->cc_op != CC_OP_DYNAMIC) {
6137 gen_op_set_cc_op(s->cc_op);
6138 s->cc_op = CC_OP_DYNAMIC;
6139 }
6140 gen_jmp_im(pc_start - s->cs_base);
6141 tcg_gen_helper_0_0(helper_sysenter);
6142 gen_eob(s);
6143 }
6144 break;
6145 case 0x135: /* sysexit */
6146 if (CODE64(s))
6147 goto illegal_op;
6148 if (!s->pe) {
6149 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6150 } else {
6151 if (s->cc_op != CC_OP_DYNAMIC) {
6152 gen_op_set_cc_op(s->cc_op);
6153 s->cc_op = CC_OP_DYNAMIC;
6154 }
6155 gen_jmp_im(pc_start - s->cs_base);
6156 tcg_gen_helper_0_0(helper_sysexit);
6157 gen_eob(s);
6158 }
6159 break;
6160 #ifdef TARGET_X86_64
6161 case 0x105: /* syscall */
6162 /* XXX: is it usable in real mode? */
6163 if (s->cc_op != CC_OP_DYNAMIC) {
6164 gen_op_set_cc_op(s->cc_op);
6165 s->cc_op = CC_OP_DYNAMIC;
6166 }
6167 gen_jmp_im(pc_start - s->cs_base);
6168 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6169 gen_eob(s);
6170 break;
6171 case 0x107: /* sysret */
6172 if (!s->pe) {
6173 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6174 } else {
6175 if (s->cc_op != CC_OP_DYNAMIC) {
6176 gen_op_set_cc_op(s->cc_op);
6177 s->cc_op = CC_OP_DYNAMIC;
6178 }
6179 gen_jmp_im(pc_start - s->cs_base);
6180 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6181 /* condition codes are modified only in long mode */
6182 if (s->lma)
6183 s->cc_op = CC_OP_EFLAGS;
6184 gen_eob(s);
6185 }
6186 break;
6187 #endif
6188 case 0x1a2: /* cpuid */
6189 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6190 break;
6191 tcg_gen_helper_0_0(helper_cpuid);
6192 break;
6193 case 0xf4: /* hlt */
6194 if (s->cpl != 0) {
6195 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6196 } else {
6197 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6198 break;
6199 if (s->cc_op != CC_OP_DYNAMIC)
6200 gen_op_set_cc_op(s->cc_op);
6201 gen_jmp_im(s->pc - s->cs_base);
6202 tcg_gen_helper_0_0(helper_hlt);
6203 s->is_jmp = 3;
6204 }
6205 break;
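/* 0F 00: "group 6"; the modrm reg field selects among
   sldt/str/lldt/ltr/verr/verw. */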
6206 case 0x100:
6207 modrm = ldub_code(s->pc++);
6208 mod = (modrm >> 6) & 3;
6209 op = (modrm >> 3) & 7;
6210 switch(op) {
6211 case 0: /* sldt */
6212 if (!s->pe || s->vm86)
6213 goto illegal_op;
6214 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6215 break;
6216 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6217 ot = OT_WORD;
6218 if (mod == 3)
6219 ot += s->dflag;
6220 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6221 break;
6222 case 2: /* lldt */
6223 if (!s->pe || s->vm86)
6224 goto illegal_op;
6225 if (s->cpl != 0) {
6226 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6227 } else {
6228 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6229 break;
6230 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6231 gen_jmp_im(pc_start - s->cs_base);
6232 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6233 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6234 }
6235 break;
6236 case 1: /* str */
6237 if (!s->pe || s->vm86)
6238 goto illegal_op;
6239 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6240 break;
6241 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6242 ot = OT_WORD;
6243 if (mod == 3)
6244 ot += s->dflag;
6245 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6246 break;
6247 case 3: /* ltr */
6248 if (!s->pe || s->vm86)
6249 goto illegal_op;
6250 if (s->cpl != 0) {
6251 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6252 } else {
6253 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6254 break;
6255 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6256 gen_jmp_im(pc_start - s->cs_base);
6257 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6258 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6259 }
6260 break;
6261 case 4: /* verr */
6262 case 5: /* verw */
6263 if (!s->pe || s->vm86)
6264 goto illegal_op;
6265 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6266 if (s->cc_op != CC_OP_DYNAMIC)
6267 gen_op_set_cc_op(s->cc_op);
6268 if (op == 4)
6269 gen_op_verr();
6270 else
6271 gen_op_verw();
6272 s->cc_op = CC_OP_EFLAGS;
6273 break;
6274 default:
6275 goto illegal_op;
6276 }
6277 break;
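/* 0F 01: "group 7"; the reg field selects sgdt/sidt/lgdt/lidt/
   smsw/lmsw/invlpg, with some mod == 3 encodings reused for
   monitor/mwait, the SVM instructions and swapgs. */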
6278 case 0x101:
6279 modrm = ldub_code(s->pc++);
6280 mod = (modrm >> 6) & 3;
6281 op = (modrm >> 3) & 7;
6282 rm = modrm & 7;
6283 switch(op) {
6284 case 0: /* sgdt */
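/* a descriptor table register is stored as a 16-bit limit
   followed at offset 2 by the base; with a 16-bit operand size
   only the low 24 bits of the base are kept, hence the
   0xffffff mask. */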
6285 if (mod == 3)
6286 goto illegal_op;
6287 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6288 break;
6289 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6290 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6291 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6292 gen_add_A0_im(s, 2);
6293 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6294 if (!s->dflag)
6295 gen_op_andl_T0_im(0xffffff);
6296 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6297 break;
6298 case 1:
6299 if (mod == 3) {
6300 switch (rm) {
6301 case 0: /* monitor */
6302 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6303 s->cpl != 0)
6304 goto illegal_op;
6305 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6306 break;
6307 gen_jmp_im(pc_start - s->cs_base);
6308 #ifdef TARGET_X86_64
6309 if (s->aflag == 2) {
6310 gen_op_movq_A0_reg(R_EAX);
6311 } else
6312 #endif
6313 {
6314 gen_op_movl_A0_reg(R_EAX);
6315 if (s->aflag == 0)
6316 gen_op_andl_A0_ffff();
6317 }
6318 gen_add_A0_ds_seg(s);
6319 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6320 break;
6321 case 1: /* mwait */
6322 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6323 s->cpl != 0)
6324 goto illegal_op;
6325 if (s->cc_op != CC_OP_DYNAMIC) {
6326 gen_op_set_cc_op(s->cc_op);
6327 s->cc_op = CC_OP_DYNAMIC;
6328 }
6329 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6330 break;
6331 gen_jmp_im(s->pc - s->cs_base);
6332 tcg_gen_helper_0_0(helper_mwait);
6333 gen_eob(s);
6334 break;
6335 default:
6336 goto illegal_op;
6337 }
6338 } else { /* sidt */
6339 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6340 break;
6341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6342 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6343 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6344 gen_add_A0_im(s, 2);
6345 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6346 if (!s->dflag)
6347 gen_op_andl_T0_im(0xffffff);
6348 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6349 }
6350 break;
6351 case 2: /* lgdt */
6352 case 3: /* lidt */
6353 if (mod == 3) {
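/* with mod == 3 these encodings are reused for the AMD SVM
   instructions; the rm field selects vmrun, vmmcall, vmload,
   vmsave, stgi, clgi, skinit or invlpga. */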
6354 switch(rm) {
6355 case 0: /* VMRUN */
6356 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6357 break;
6358 if (s->cc_op != CC_OP_DYNAMIC)
6359 gen_op_set_cc_op(s->cc_op);
6360 gen_jmp_im(s->pc - s->cs_base);
6361 tcg_gen_helper_0_0(helper_vmrun);
6362 s->cc_op = CC_OP_EFLAGS;
6363 gen_eob(s);
6364 break;
6365 case 1: /* VMMCALL */
6366 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6367 break;
6368 /* FIXME: cause #UD if hflags & SVM */
6369 tcg_gen_helper_0_0(helper_vmmcall);
6370 break;
6371 case 2: /* VMLOAD */
6372 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6373 break;
6374 tcg_gen_helper_0_0(helper_vmload);
6375 break;
6376 case 3: /* VMSAVE */
6377 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6378 break;
6379 tcg_gen_helper_0_0(helper_vmsave);
6380 break;
6381 case 4: /* STGI */
6382 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6383 break;
6384 tcg_gen_helper_0_0(helper_stgi);
6385 break;
6386 case 5: /* CLGI */
6387 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6388 break;
6389 tcg_gen_helper_0_0(helper_clgi);
6390 break;
6391 case 6: /* SKINIT */
6392 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6393 break;
6394 tcg_gen_helper_0_0(helper_skinit);
6395 break;
6396 case 7: /* INVLPGA */
6397 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6398 break;
6399 tcg_gen_helper_0_0(helper_invlpga);
6400 break;
6401 default:
6402 goto illegal_op;
6403 }
6404 } else if (s->cpl != 0) {
6405 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6406 } else {
6407 if (gen_svm_check_intercept(s, pc_start,
6408 op == 2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6409 break;
6410 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6411 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6412 gen_add_A0_im(s, 2);
6413 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6414 if (!s->dflag)
6415 gen_op_andl_T0_im(0xffffff);
6416 if (op == 2) {
6417 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6418 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6419 } else {
6420 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6421 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6422 }
6423 }
6424 break;
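/* smsw/lmsw operate on the low 16 bits of CR0; lmsw is
   expected to be restricted by the helper to the low 4 bits
   and must never clear PE. */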
6425 case 4: /* smsw */
6426 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6427 break;
6428 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6429 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6430 break;
6431 case 6: /* lmsw */
6432 if (s->cpl != 0) {
6433 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6434 } else {
6435 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6436 break;
6437 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6438 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6439 gen_jmp_im(s->pc - s->cs_base);
6440 gen_eob(s);
6441 }
6442 break;
6443 case 7: /* invlpg */
6444 if (s->cpl != 0) {
6445 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6446 } else {
6447 if (mod == 3) {
6448 #ifdef TARGET_X86_64
6449 if (CODE64(s) && rm == 0) {
6450 /* swapgs */
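/* exchange the current GS base with the value parked in
   kernelgsbase (the IA32_KERNEL_GS_BASE MSR); 64-bit kernels
   use this on entry to reach per-cpu data. */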
6451 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6452 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6453 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6454 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6455 } else
6456 #endif
6457 {
6458 goto illegal_op;
6459 }
6460 } else {
6461 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6462 break;
6463 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6464 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6465 gen_jmp_im(s->pc - s->cs_base);
6466 gen_eob(s);
6467 }
6468 }
6469 break;
6470 default:
6471 goto illegal_op;
6472 }
6473 break;
6474 case 0x108: /* invd */
6475 case 0x109: /* wbinvd */
6476 if (s->cpl != 0) {
6477 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6478 } else {
6479 if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6480 break;
6481 /* nothing to do */
6482 }
6483 break;
6484 case 0x63: /* arpl or movslS (x86_64) */
6485 #ifdef TARGET_X86_64
6486 if (CODE64(s)) {
6487 int d_ot;
6488 /* d_ot is the size of the destination */
6489 d_ot = dflag + OT_WORD;
6490
6491 modrm = ldub_code(s->pc++);
6492 reg = ((modrm >> 3) & 7) | rex_r;
6493 mod = (modrm >> 6) & 3;
6494 rm = (modrm & 7) | REX_B(s);
6495
6496 if (mod == 3) {
6497 gen_op_mov_TN_reg(OT_LONG, 0, rm);
6498 /* sign extend */
6499 if (d_ot == OT_QUAD)
6500 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6501 gen_op_mov_reg_T0(d_ot, reg);
6502 } else {
6503 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6504 if (d_ot == OT_QUAD) {
6505 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6506 } else {
6507 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6508 }
6509 gen_op_mov_reg_T0(d_ot, reg);
6510 }
6511 } else
6512 #endif
6513 {
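/* arpl: if the RPL (bits 1:0) of the destination selector is
   lower than that of the source, raise it to the source RPL
   and set ZF, otherwise clear ZF. */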
6514 if (!s->pe || s->vm86)
6515 goto illegal_op;
6516 ot = dflag ? OT_LONG : OT_WORD;
6517 modrm = ldub_code(s->pc++);
6518 reg = (modrm >> 3) & 7;
6519 mod = (modrm >> 6) & 3;
6520 rm = modrm & 7;
6521 if (mod != 3) {
6522 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6523 gen_op_ld_T0_A0(ot + s->mem_index);
6524 } else {
6525 gen_op_mov_TN_reg(ot, 0, rm);
6526 }
6527 gen_op_mov_TN_reg(ot, 1, reg);
6528 if (s->cc_op != CC_OP_DYNAMIC)
6529 gen_op_set_cc_op(s->cc_op);
6530 gen_op_arpl();
6531 s->cc_op = CC_OP_EFLAGS;
6532 if (mod != 3) {
6533 gen_op_st_T0_A0(ot + s->mem_index);
6534 } else {
6535 gen_op_mov_reg_T0(ot, rm);
6536 }
6537 gen_op_arpl_update();
6538 }
6539 break;
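/* lar/lsl load the access rights word / segment limit of the
   descriptor named by the source selector; ZF reports whether
   the selector was valid and accessible. */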
6540 case 0x102: /* lar */
6541 case 0x103: /* lsl */
6542 if (!s->pe || s->vm86)
6543 goto illegal_op;
6544 ot = dflag ? OT_LONG : OT_WORD;
6545 modrm = ldub_code(s->pc++);
6546 reg = ((modrm >> 3) & 7) | rex_r;
6547 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6548 gen_op_mov_TN_reg(ot, 1, reg);
6549 if (s->cc_op != CC_OP_DYNAMIC)
6550 gen_op_set_cc_op(s->cc_op);
6551 if (b == 0x102)
6552 gen_op_lar();
6553 else
6554 gen_op_lsl();
6555 s->cc_op = CC_OP_EFLAGS;
6556 gen_op_mov_reg_T1(ot, reg);
6557 break;
6558 case 0x118:
6559 modrm = ldub_code(s->pc++);
6560 mod = (modrm >> 6) & 3;
6561 op = (modrm >> 3) & 7;
6562 switch(op) {
6563 case 0: /* prefetchnta */
6564 case 1: /* prefetcht0 */
6565 case 2: /* prefetcht1 */
6566 case 3: /* prefetcht2 */
6567 if (mod == 3)
6568 goto illegal_op;
6569 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6570 /* nothing more to do */
6571 break;
6572 default: /* nop (multi byte) */
6573 gen_nop_modrm(s, modrm);
6574 break;
6575 }
6576 break;
6577 case 0x119 ... 0x11f: /* nop (multi byte) */
6578 modrm = ldub_code(s->pc++);
6579 gen_nop_modrm(s, modrm);
6580 break;
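/* privileged moves between general registers and control
   registers; b & 2 distinguishes the write direction (0x122)
   from the read (0x120), and CR8 (the TPR) is read through a
   helper on system emulation. */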
6581 case 0x120: /* mov reg, crN */
6582 case 0x122: /* mov crN, reg */
6583 if (s->cpl != 0) {
6584 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6585 } else {
6586 modrm = ldub_code(s->pc++);
6587 if ((modrm & 0xc0) != 0xc0)
6588 goto illegal_op;
6589 rm = (modrm & 7) | REX_B(s);
6590 reg = ((modrm >> 3) & 7) | rex_r;
6591 if (CODE64(s))
6592 ot = OT_QUAD;
6593 else
6594 ot = OT_LONG;
6595 switch(reg) {
6596 case 0:
6597 case 2:
6598 case 3:
6599 case 4:
6600 case 8:
6601 if (b & 2) {
6602 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6603 gen_op_mov_TN_reg(ot, 0, rm);
6604 tcg_gen_helper_0_2(helper_movl_crN_T0,
6605 tcg_const_i32(reg), cpu_T[0]);
6606 gen_jmp_im(s->pc - s->cs_base);
6607 gen_eob(s);
6608 } else {
6609 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6610 #if !defined(CONFIG_USER_ONLY)
6611 if (reg == 8)
6612 tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6613 else
6614 #endif
6615 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6616 gen_op_mov_reg_T0(ot, rm);
6617 }
6618 break;
6619 default:
6620 goto illegal_op;
6621 }
6622 }
6623 break;
6624 case 0x121: /* mov reg, drN */
6625 case 0x123: /* mov drN, reg */
6626 if (s->cpl != 0) {
6627 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6628 } else {
6629 modrm = ldub_code(s->pc++);
6630 if ((modrm & 0xc0) != 0xc0)
6631 goto illegal_op;
6632 rm = (modrm & 7) | REX_B(s);
6633 reg = ((modrm >> 3) & 7) | rex_r;
6634 if (CODE64(s))
6635 ot = OT_QUAD;
6636 else
6637 ot = OT_LONG;
6638 /* XXX: do it dynamically with CR4.DE bit */
6639 if (reg == 4 || reg == 5 || reg >= 8)
6640 goto illegal_op;
6641 if (b & 2) {
6642 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6643 gen_op_mov_TN_reg(ot, 0, rm);
6644 tcg_gen_helper_0_2(helper_movl_drN_T0,
6645 tcg_const_i32(reg), cpu_T[0]);
6646 gen_jmp_im(s->pc - s->cs_base);
6647 gen_eob(s);
6648 } else {
6649 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6650 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6651 gen_op_mov_reg_T0(ot, rm);
6652 }
6653 }
6654 break;
6655 case 0x106: /* clts */
6656 if (s->cpl != 0) {
6657 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6658 } else {
6659 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6660 tcg_gen_helper_0_0(helper_clts);
6661 /* abort block because static cpu state changed */
6662 gen_jmp_im(s->pc - s->cs_base);
6663 gen_eob(s);
6664 }
6665 break;
6666 /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6667 case 0x1c3: /* MOVNTI reg, mem */
6668 if (!(s->cpuid_features & CPUID_SSE2))
6669 goto illegal_op;
6670 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6671 modrm = ldub_code(s->pc++);
6672 mod = (modrm >> 6) & 3;
6673 if (mod == 3)
6674 goto illegal_op;
6675 reg = ((modrm >> 3) & 7) | rex_r;
6676 /* generate a generic store */
6677 gen_ldst_modrm(s, modrm, ot, reg, 1);
6678 break;
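/* 0F AE: "group 15"; the reg field selects fxsave/fxrstor/
   ldmxcsr/stmxcsr and, with mod == 3, the lfence/mfence/sfence
   encodings. */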
6679 case 0x1ae:
6680 modrm = ldub_code(s->pc++);
6681 mod = (modrm >> 6) & 3;
6682 op = (modrm >> 3) & 7;
6683 switch(op) {
6684 case 0: /* fxsave */
6685 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6686 (s->flags & HF_EM_MASK))
6687 goto illegal_op;
6688 if (s->flags & HF_TS_MASK) {
6689 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6690 break;
6691 }
6692 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6693 if (s->cc_op != CC_OP_DYNAMIC)
6694 gen_op_set_cc_op(s->cc_op);
6695 gen_jmp_im(pc_start - s->cs_base);
6696 tcg_gen_helper_0_2(helper_fxsave,
6697 cpu_A0, tcg_const_i32((s->dflag == 2)));
6698 break;
6699 case 1: /* fxrstor */
6700 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6701 (s->flags & HF_EM_MASK))
6702 goto illegal_op;
6703 if (s->flags & HF_TS_MASK) {
6704 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6705 break;
6706 }
6707 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6708 if (s->cc_op != CC_OP_DYNAMIC)
6709 gen_op_set_cc_op(s->cc_op);
6710 gen_jmp_im(pc_start - s->cs_base);
6711 tcg_gen_helper_0_2(helper_fxrstor,
6712 cpu_A0, tcg_const_i32((s->dflag == 2)));
6713 break;
6714 case 2: /* ldmxcsr */
6715 case 3: /* stmxcsr */
6716 if (s->flags & HF_TS_MASK) {
6717 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6718 break;
6719 }
6720 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6721 mod == 3)
6722 goto illegal_op;
6723 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6724 if (op == 2) {
6725 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6726 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6727 } else {
6728 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6729 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6730 }
6731 break;
6732 case 5: /* lfence */
6733 case 6: /* mfence */
6734 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6735 goto illegal_op;
6736 break;
6737 case 7: /* sfence / clflush */
6738 if ((modrm & 0xc7) == 0xc0) {
6739 /* sfence */
6740 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6741 if (!(s->cpuid_features & CPUID_SSE))
6742 goto illegal_op;
6743 } else {
6744 /* clflush */
6745 if (!(s->cpuid_features & CPUID_CLFLUSH))
6746 goto illegal_op;
6747 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6748 }
6749 break;
6750 default:
6751 goto illegal_op;
6752 }
6753 break;
6754 case 0x10d: /* 3DNow! prefetch(w) */
6755 modrm = ldub_code(s->pc++);
6756 mod = (modrm >> 6) & 3;
6757 if (mod == 3)
6758 goto illegal_op;
6759 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6760 /* ignore for now */
6761 break;
6762 case 0x1aa: /* rsm */
6763 if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6764 break;
6765 if (!(s->flags & HF_SMM_MASK))
6766 goto illegal_op;
6767 if (s->cc_op != CC_OP_DYNAMIC) {
6768 gen_op_set_cc_op(s->cc_op);
6769 s->cc_op = CC_OP_DYNAMIC;
6770 }
6771 gen_jmp_im(s->pc - s->cs_base);
6772 tcg_gen_helper_0_0(helper_rsm);
6773 gen_eob(s);
6774 break;
6775 case 0x10e ... 0x10f:
6776 /* 3DNow! instructions, ignore prefixes */
6777 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6778 case 0x110 ... 0x117:
6779 case 0x128 ... 0x12f:
6780 case 0x150 ... 0x177:
6781 case 0x17c ... 0x17f:
6782 case 0x1c2:
6783 case 0x1c4 ... 0x1c6:
6784 case 0x1d0 ... 0x1fe:
6785 gen_sse(s, b, pc_start, rex_r);
6786 break;
6787 default:
6788 goto illegal_op;
6789 }
6790 /* lock generation */
6791 if (s->prefix & PREFIX_LOCK)
6792 tcg_gen_helper_0_0(helper_unlock);
6793 return s->pc;
6794 illegal_op:
6795 if (s->prefix & PREFIX_LOCK)
6796 tcg_gen_helper_0_0(helper_unlock);
6797 /* XXX: ensure that no lock was generated */
6798 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6799 return s->pc;
6800 }
6801
6802 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6803 {
6804 switch(macro_id) {
6805 #ifdef MACRO_TEST
6806 case MACRO_TEST:
6807 tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6808 break;
6809 #endif
6810 }
6811 }
6812
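/* register the fixed TCG globals used by the translator: the env
   pointer always lives in AREG0; T0/T1/A0 get the host registers
   AREG1-3 when a target long fits in one, and otherwise fall back
   to fields of CPUState. */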
6813 void optimize_flags_init(void)
6814 {
6815 #if TCG_TARGET_REG_BITS == 32
6816 assert(sizeof(CCTable) == (1 << 3));
6817 #else
6818 assert(sizeof(CCTable) == (1 << 4));
6819 #endif
6820 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6821
6822 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6823 #if TARGET_LONG_BITS > HOST_LONG_BITS
6824 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
6825 TCG_AREG0, offsetof(CPUState, t0), "T0");
6826 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6827 TCG_AREG0, offsetof(CPUState, t1), "T1");
6828 cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6829 TCG_AREG0, offsetof(CPUState, t2), "A0");
6830 #else
6831 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6832 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6833 cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6834 #endif
6835 cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
6836 TCG_AREG0, offsetof(CPUState, t3), "T3");
6837 #if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6838 /* XXX: must be suppressed once there are fewer fixed registers */
6839 cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6840 #endif
6841 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
6842 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
6843 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
6844 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
6845 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
6846 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
6847 }
6848
6849 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6850 basic block 'tb'. If search_pc is TRUE, also generate PC
6851 information for each intermediate instruction. */
6852 static inline int gen_intermediate_code_internal(CPUState *env,
6853 TranslationBlock *tb,
6854 int search_pc)
6855 {
6856 DisasContext dc1, *dc = &dc1;
6857 target_ulong pc_ptr;
6858 uint16_t *gen_opc_end;
6859 int j, lj, cflags;
6860 uint64_t flags;
6861 target_ulong pc_start;
6862 target_ulong cs_base;
6863
6864 /* generate intermediate code */
6865 pc_start = tb->pc;
6866 cs_base = tb->cs_base;
6867 flags = tb->flags;
6868 cflags = tb->cflags;
6869
6870 dc->pe = (flags >> HF_PE_SHIFT) & 1;
6871 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6872 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6873 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6874 dc->f_st = 0;
6875 dc->vm86 = (flags >> VM_SHIFT) & 1;
6876 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6877 dc->iopl = (flags >> IOPL_SHIFT) & 3;
6878 dc->tf = (flags >> TF_SHIFT) & 1;
6879 dc->singlestep_enabled = env->singlestep_enabled;
6880 dc->cc_op = CC_OP_DYNAMIC;
6881 dc->cs_base = cs_base;
6882 dc->tb = tb;
6883 dc->popl_esp_hack = 0;
6884 /* select memory access functions */
6885 dc->mem_index = 0;
6886 if (flags & HF_SOFTMMU_MASK) {
6887 if (dc->cpl == 3)
6888 dc->mem_index = 2 * 4;
6889 else
6890 dc->mem_index = 1 * 4;
6891 }
6892 dc->cpuid_features = env->cpuid_features;
6893 dc->cpuid_ext_features = env->cpuid_ext_features;
6894 dc->cpuid_ext2_features = env->cpuid_ext2_features;
6895 #ifdef TARGET_X86_64
6896 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6897 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6898 #endif
6899 dc->flags = flags;
6900 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6901 (flags & HF_INHIBIT_IRQ_MASK)
6902 #ifndef CONFIG_SOFTMMU
6903 || (flags & HF_SOFTMMU_MASK)
6904 #endif
6905 );
6906 #if 0
6907 /* check addseg logic */
6908 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6909 printf("ERROR addseg\n");
6910 #endif
6911
6912 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6913 #if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6914 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
6915 #endif
6916 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
6917 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
6918 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
6919 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
6920 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
6921 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6922 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6923
6924 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6925
6926 dc->is_jmp = DISAS_NEXT;
6927 pc_ptr = pc_start;
6928 lj = -1;
6929
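/* main translation loop: translate one guest instruction per
   iteration until disas_insn() signals a jump, single-stepping
   forces a stop, or the op buffer / page budget is exhausted. */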
6930 for(;;) {
6931 if (env->nb_breakpoints > 0) {
6932 for(j = 0; j < env->nb_breakpoints; j++) {
6933 if (env->breakpoints[j] == pc_ptr) {
6934 gen_debug(dc, pc_ptr - dc->cs_base);
6935 break;
6936 }
6937 }
6938 }
6939 if (search_pc) {
6940 j = gen_opc_ptr - gen_opc_buf;
6941 if (lj < j) {
6942 lj++;
6943 while (lj < j)
6944 gen_opc_instr_start[lj++] = 0;
6945 }
6946 gen_opc_pc[lj] = pc_ptr;
6947 gen_opc_cc_op[lj] = dc->cc_op;
6948 gen_opc_instr_start[lj] = 1;
6949 }
6950 pc_ptr = disas_insn(dc, pc_ptr);
6951 /* stop translation if indicated */
6952 if (dc->is_jmp)
6953 break;
6954 /* if single step mode, we generate only one instruction and
6955 generate an exception */
6956 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6957 the flag and abort the translation to give the irqs a
6958 chance to happen */
6959 if (dc->tf || dc->singlestep_enabled ||
6960 (flags & HF_INHIBIT_IRQ_MASK) ||
6961 (cflags & CF_SINGLE_INSN)) {
6962 gen_jmp_im(pc_ptr - dc->cs_base);
6963 gen_eob(dc);
6964 break;
6965 }
6966 /* if the translation is too long, stop generation too */
6967 if (gen_opc_ptr >= gen_opc_end ||
6968 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6969 gen_jmp_im(pc_ptr - dc->cs_base);
6970 gen_eob(dc);
6971 break;
6972 }
6973 }
6974 *gen_opc_ptr = INDEX_op_end;
6975 /* don't forget to fill in the last values */
6976 if (search_pc) {
6977 j = gen_opc_ptr - gen_opc_buf;
6978 lj++;
6979 while (lj <= j)
6980 gen_opc_instr_start[lj++] = 0;
6981 }
6982
6983 #ifdef DEBUG_DISAS
6984 if (loglevel & CPU_LOG_TB_CPU) {
6985 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6986 }
6987 if (loglevel & CPU_LOG_TB_IN_ASM) {
6988 int disas_flags;
6989 fprintf(logfile, "----------------\n");
6990 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6991 #ifdef TARGET_X86_64
6992 if (dc->code64)
6993 disas_flags = 2;
6994 else
6995 #endif
6996 disas_flags = !dc->code32;
6997 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6998 fprintf(logfile, "\n");
6999 if (loglevel & CPU_LOG_TB_OP_OPT) {
7000 fprintf(logfile, "OP before opt:\n");
7001 tcg_dump_ops(&tcg_ctx, logfile);
7002 fprintf(logfile, "\n");
7003 }
7004 }
7005 #endif
7006
7007 if (!search_pc)
7008 tb->size = pc_ptr - pc_start;
7009 return 0;
7010 }
7011
7012 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7013 {
7014 return gen_intermediate_code_internal(env, tb, 0);
7015 }
7016
7017 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7018 {
7019 return gen_intermediate_code_internal(env, tb, 1);
7020 }
7021
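/* restore state after a fault inside a TB: look up the guest eip
   (and, when it was static at that point, cc_op) recorded for the
   intermediate-op position pc_pos found by the searched-pc pass. */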
7022 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7023 unsigned long searched_pc, int pc_pos, void *puc)
7024 {
7025 int cc_op;
7026 #ifdef DEBUG_DISAS
7027 if (loglevel & CPU_LOG_TB_OP) {
7028 int i;
7029 fprintf(logfile, "RESTORE:\n");
7030 for(i = 0; i <= pc_pos; i++) {
7031 if (gen_opc_instr_start[i]) {
7032 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7033 }
7034 }
7035 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7036 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7037 (uint32_t)tb->cs_base);
7038 }
7039 #endif
7040 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7041 cc_op = gen_opc_cc_op[pc_pos];
7042 if (cc_op != CC_OP_DYNAMIC)
7043 env->cc_op = cc_op;
7044 }